From 851fa47ece564ef99843603629df5690837a843d Mon Sep 17 00:00:00 2001 From: fern-api <115122769+fern-api[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 19:36:58 +0000 Subject: [PATCH 1/4] SDK regeneration --- .gitignore | 4 +- .mock/definition/__package__.yml | 425 ++++-- .mock/definition/annotations.yml | 5 + .mock/definition/comments.yml | 4 + .mock/definition/dataManager.yml | 2 + .mock/definition/exportStorage.yml | 1 + .mock/definition/exportStorage/azure.yml | 5 + .mock/definition/exportStorage/gcs.yml | 5 + .mock/definition/exportStorage/local.yml | 5 + .mock/definition/exportStorage/redis.yml | 5 + .mock/definition/exportStorage/s3.yml | 5 + .mock/definition/exportStorage/s3S.yml | 4 + .mock/definition/files.yml | 3 + .mock/definition/importStorage.yml | 1 + .mock/definition/importStorage/azure.yml | 5 + .mock/definition/importStorage/gcs.yml | 5 + .mock/definition/importStorage/local.yml | 5 + .mock/definition/importStorage/redis.yml | 5 + .mock/definition/importStorage/s3.yml | 5 + .mock/definition/importStorage/s3S.yml | 5 + .mock/definition/jwtSettings.yml | 2 + .mock/definition/labels.yml | 4 + .mock/definition/ml.yml | 4 + .mock/definition/modelProviders.yml | 4 + .mock/definition/organizations.yml | 5 + .mock/definition/organizations/members.yml | 2 + .mock/definition/predictions.yml | 5 + .mock/definition/projects.yml | 7 + .mock/definition/projects/exports.yml | 7 + .mock/definition/projects/labels.yml | 4 + .mock/definition/projects/pauses.yml | 4 + .mock/definition/prompts.yml | 6 + .mock/definition/prompts/indicators.yml | 2 + .mock/definition/prompts/runs.yml | 2 + .mock/definition/prompts/versions.yml | 7 + .mock/definition/tasks.yml | 10 +- .mock/definition/tokens.yml | 4 + .mock/definition/users.yml | 9 + .mock/definition/versions.yml | 1 + .mock/definition/views.yml | 4 + .mock/definition/webhooks.yml | 5 + .mock/definition/workspaces.yml | 4 + .mock/definition/workspaces/members.yml | 2 + .mock/fern.config.json | 2 +- poetry.lock | 
381 ++--- pyproject.toml | 28 +- requirements.txt | 20 + src/label_studio_sdk/__init__.py | 2 + src/label_studio_sdk/actions/__init__.py | 2 + src/label_studio_sdk/actions/client.py | 138 +- src/label_studio_sdk/actions/raw_client.py | 223 +++ .../actions/types/__init__.py | 2 + .../types/actions_create_request_filters.py | 8 +- ...tions_create_request_filters_items_item.py | 8 +- .../actions_create_request_selected_items.py | 3 +- ..._create_request_selected_items_excluded.py | 8 +- ..._create_request_selected_items_included.py | 8 +- src/label_studio_sdk/annotations/__init__.py | 2 + src/label_studio_sdk/annotations/client.py | 380 ++--- .../annotations/raw_client.py | 794 +++++++++++ .../annotations/types/__init__.py | 2 + ...ions_create_bulk_request_selected_items.py | 8 +- .../annotations_create_bulk_response_item.py | 4 +- src/label_studio_sdk/base_client.py | 73 +- src/label_studio_sdk/comments/__init__.py | 2 + src/label_studio_sdk/comments/client.py | 279 +--- src/label_studio_sdk/comments/raw_client.py | 529 +++++++ src/label_studio_sdk/core/__init__.py | 5 + src/label_studio_sdk/core/api_error.py | 18 +- src/label_studio_sdk/core/force_multipart.py | 16 + src/label_studio_sdk/core/http_client.py | 108 +- src/label_studio_sdk/core/http_response.py | 55 + src/label_studio_sdk/core/jsonable_encoder.py | 1 - src/label_studio_sdk/core/pagination.py | 96 +- .../core/pydantic_utilities.py | 183 +-- src/label_studio_sdk/core/serialization.py | 10 +- src/label_studio_sdk/errors/__init__.py | 2 + .../errors/bad_request_error.py | 7 +- .../errors/internal_server_error.py | 6 +- .../errors/not_found_error.py | 7 +- .../errors/unauthorized_error.py | 7 +- .../export_storage/__init__.py | 2 + .../export_storage/azure/__init__.py | 2 + .../export_storage/azure/client.py | 423 ++---- .../export_storage/azure/raw_client.py | 881 ++++++++++++ .../export_storage/azure/types/__init__.py | 2 + .../azure/types/azure_create_response.py | 4 +- 
.../azure/types/azure_update_response.py | 4 +- src/label_studio_sdk/export_storage/client.py | 126 +- .../export_storage/gcs/__init__.py | 2 + .../export_storage/gcs/client.py | 423 ++---- .../export_storage/gcs/raw_client.py | 881 ++++++++++++ .../export_storage/gcs/types/__init__.py | 2 + .../gcs/types/gcs_create_response.py | 4 +- .../gcs/types/gcs_update_response.py | 4 +- .../export_storage/local/__init__.py | 2 + .../export_storage/local/client.py | 399 ++---- .../export_storage/local/raw_client.py | 821 +++++++++++ .../export_storage/local/types/__init__.py | 2 + .../local/types/local_create_response.py | 4 +- .../local/types/local_update_response.py | 4 +- .../export_storage/raw_client.py | 93 ++ .../export_storage/redis/__init__.py | 2 + .../export_storage/redis/client.py | 435 ++---- .../export_storage/redis/raw_client.py | 911 ++++++++++++ .../export_storage/redis/types/__init__.py | 2 + .../redis/types/redis_create_response.py | 4 +- .../redis/types/redis_update_response.py | 4 +- .../export_storage/s3/__init__.py | 2 + .../export_storage/s3/client.py | 471 ++----- .../export_storage/s3/raw_client.py | 999 +++++++++++++ .../export_storage/s3/types/__init__.py | 2 + .../s3/types/s3create_response.py | 4 +- .../s3/types/s3update_response.py | 4 +- .../export_storage/s3s/__init__.py | 2 + .../export_storage/s3s/client.py | 403 ++---- .../export_storage/s3s/raw_client.py | 827 +++++++++++ .../export_storage/types/__init__.py | 2 + ...export_storage_list_types_response_item.py | 4 +- src/label_studio_sdk/files/__init__.py | 2 + src/label_studio_sdk/files/client.py | 257 +--- src/label_studio_sdk/files/raw_client.py | 523 +++++++ .../import_storage/__init__.py | 2 + .../import_storage/azure/__init__.py | 2 + .../import_storage/azure/client.py | 459 ++---- .../import_storage/azure/raw_client.py | 981 +++++++++++++ .../import_storage/azure/types/__init__.py | 2 + .../azure/types/azure_create_response.py | 4 +- .../azure/types/azure_update_response.py | 4 +- 
src/label_studio_sdk/import_storage/client.py | 126 +- .../import_storage/gcs/__init__.py | 2 + .../import_storage/gcs/client.py | 459 ++---- .../import_storage/gcs/raw_client.py | 981 +++++++++++++ .../import_storage/gcs/types/__init__.py | 2 + .../gcs/types/gcs_create_response.py | 4 +- .../gcs/types/gcs_update_response.py | 4 +- .../import_storage/local/__init__.py | 2 + .../import_storage/local/client.py | 399 ++---- .../import_storage/local/raw_client.py | 827 +++++++++++ .../import_storage/local/types/__init__.py | 2 + .../local/types/local_create_response.py | 4 +- .../local/types/local_update_response.py | 4 +- .../import_storage/raw_client.py | 93 ++ .../import_storage/redis/__init__.py | 2 + .../import_storage/redis/client.py | 435 ++---- .../import_storage/redis/raw_client.py | 917 ++++++++++++ .../import_storage/redis/types/__init__.py | 2 + .../redis/types/redis_create_response.py | 4 +- .../redis/types/redis_update_response.py | 4 +- .../import_storage/s3/__init__.py | 2 + .../import_storage/s3/client.py | 519 ++----- .../import_storage/s3/raw_client.py | 1129 +++++++++++++++ .../import_storage/s3/types/__init__.py | 2 + .../s3/types/s3create_response.py | 4 +- .../s3/types/s3update_response.py | 4 +- .../import_storage/s3s/__init__.py | 2 + .../import_storage/s3s/client.py | 491 ++----- .../import_storage/s3s/raw_client.py | 1047 ++++++++++++++ .../import_storage/types/__init__.py | 2 + ...import_storage_list_types_response_item.py | 4 +- src/label_studio_sdk/jwt_settings/__init__.py | 2 + src/label_studio_sdk/jwt_settings/client.py | 128 +- .../jwt_settings/raw_client.py | 212 +++ src/label_studio_sdk/ml/__init__.py | 2 + src/label_studio_sdk/ml/client.py | 462 ++---- src/label_studio_sdk/ml/raw_client.py | 968 +++++++++++++ src/label_studio_sdk/ml/types/__init__.py | 2 + .../ml/types/ml_create_response.py | 4 +- .../ml/types/ml_update_response.py | 4 +- .../model_providers/__init__.py | 2 + .../model_providers/client.py | 374 ++--- 
.../model_providers/raw_client.py | 706 ++++++++++ src/label_studio_sdk/predictions/__init__.py | 2 + src/label_studio_sdk/predictions/client.py | 271 +--- .../predictions/raw_client.py | 573 ++++++++ src/label_studio_sdk/projects/__init__.py | 2 + src/label_studio_sdk/projects/client.py | 563 ++------ .../projects/exports/__init__.py | 2 + .../projects/exports/client.py | 494 ++----- .../projects/exports/raw_client.py | 1038 ++++++++++++++ .../projects/exports/types/__init__.py | 2 + .../exports/types/exports_convert_response.py | 6 +- .../exports_list_formats_response_item.py | 6 +- .../projects/pauses/__init__.py | 2 + .../projects/pauses/client.py | 261 +--- .../projects/pauses/raw_client.py | 543 +++++++ src/label_studio_sdk/projects/raw_client.py | 1245 +++++++++++++++++ .../projects/types/__init__.py | 2 + .../types/projects_create_response.py | 4 +- .../types/projects_import_tasks_response.py | 4 +- .../projects/types/projects_list_response.py | 6 +- .../types/projects_update_response.py | 4 +- src/label_studio_sdk/prompts/__init__.py | 2 + src/label_studio_sdk/prompts/client.py | 503 ++----- .../prompts/indicators/__init__.py | 2 + .../prompts/indicators/client.py | 117 +- .../prompts/indicators/raw_client.py | 183 +++ src/label_studio_sdk/prompts/raw_client.py | 890 ++++++++++++ src/label_studio_sdk/prompts/runs/__init__.py | 2 + src/label_studio_sdk/prompts/runs/client.py | 190 +-- .../prompts/runs/raw_client.py | 348 +++++ .../prompts/runs/types/__init__.py | 2 + .../prompts/types/__init__.py | 2 + ...ictions_request_failed_predictions_item.py | 4 +- ...ompts_batch_failed_predictions_response.py | 4 +- ..._batch_predictions_request_results_item.py | 4 +- .../prompts_batch_predictions_response.py | 4 +- .../prompts/versions/__init__.py | 2 + .../prompts/versions/client.py | 502 ++----- .../prompts/versions/raw_client.py | 1008 +++++++++++++ src/label_studio_sdk/tasks/__init__.py | 2 + src/label_studio_sdk/tasks/client.py | 400 +----- 
src/label_studio_sdk/tasks/raw_client.py | 816 +++++++++++ src/label_studio_sdk/tasks/types/__init__.py | 2 + .../tasks/types/tasks_list_response.py | 6 +- src/label_studio_sdk/tokens/__init__.py | 2 + src/label_studio_sdk/tokens/client.py | 329 +---- src/label_studio_sdk/tokens/raw_client.py | 495 +++++++ src/label_studio_sdk/types/__init__.py | 2 + .../types/access_token_response.py | 6 +- src/label_studio_sdk/types/annotation.py | 6 +- .../types/annotation_completed_by.py | 1 + .../types/annotation_filter_options.py | 4 +- .../types/annotations_dm_field.py | 6 +- .../types/api_token_response.py | 6 +- .../types/azure_blob_export_storage.py | 6 +- .../types/azure_blob_import_storage.py | 6 +- src/label_studio_sdk/types/base_task.py | 8 +- src/label_studio_sdk/types/base_user.py | 6 +- src/label_studio_sdk/types/comment.py | 6 +- .../types/converted_format.py | 6 +- .../types/data_manager_task_serializer.py | 10 +- ...ata_manager_task_serializer_drafts_item.py | 6 +- ...anager_task_serializer_predictions_item.py | 6 +- src/label_studio_sdk/types/export.py | 10 +- src/label_studio_sdk/types/export_snapshot.py | 14 +- src/label_studio_sdk/types/file_upload.py | 4 +- src/label_studio_sdk/types/filter.py | 4 +- src/label_studio_sdk/types/filter_group.py | 6 +- .../types/gcs_export_storage.py | 6 +- .../types/gcs_import_storage.py | 6 +- src/label_studio_sdk/types/inference_run.py | 10 +- .../types/inference_run_cost_estimate.py | 4 +- .../types/jwt_settings_response.py | 6 +- .../types/key_indicator_value.py | 4 +- src/label_studio_sdk/types/key_indicators.py | 1 + .../types/key_indicators_item.py | 6 +- ...ey_indicators_item_additional_kpis_item.py | 4 +- .../key_indicators_item_extra_kpis_item.py | 4 +- .../types/local_files_export_storage.py | 6 +- .../types/local_files_import_storage.py | 6 +- src/label_studio_sdk/types/ml_backend.py | 8 +- .../types/model_provider_connection.py | 14 +- src/label_studio_sdk/types/pause.py | 8 +- 
src/label_studio_sdk/types/prediction.py | 6 +- src/label_studio_sdk/types/project.py | 10 +- src/label_studio_sdk/types/project_import.py | 8 +- .../types/project_label_config.py | 6 +- src/label_studio_sdk/types/prompt.py | 10 +- .../types/prompt_associated_projects_item.py | 1 + .../prompt_associated_projects_item_id.py | 4 +- src/label_studio_sdk/types/prompt_version.py | 10 +- .../types/redis_export_storage.py | 6 +- .../types/redis_import_storage.py | 6 +- .../types/refined_prompt_response.py | 6 +- .../types/rotate_token_response.py | 6 +- .../types/s3export_storage.py | 8 +- .../types/s3import_storage.py | 8 +- .../types/s3s_export_storage.py | 6 +- .../types/s3s_import_storage.py | 8 +- .../types/serialization_option.py | 4 +- .../types/serialization_options.py | 6 +- src/label_studio_sdk/types/task.py | 6 +- .../types/task_filter_options.py | 4 +- src/label_studio_sdk/types/user_simple.py | 4 +- src/label_studio_sdk/types/view.py | 6 +- src/label_studio_sdk/types/webhook.py | 6 +- .../types/webhook_serializer_for_update.py | 6 +- src/label_studio_sdk/types/workspace.py | 6 +- src/label_studio_sdk/users/__init__.py | 2 + src/label_studio_sdk/users/client.py | 437 ++---- src/label_studio_sdk/users/raw_client.py | 833 +++++++++++ src/label_studio_sdk/users/types/__init__.py | 2 + .../users/types/users_get_token_response.py | 4 +- .../users/types/users_reset_token_response.py | 4 +- src/label_studio_sdk/versions/__init__.py | 2 + src/label_studio_sdk/versions/client.py | 74 +- src/label_studio_sdk/versions/raw_client.py | 91 ++ .../versions/types/__init__.py | 2 + .../versions/types/versions_get_response.py | 4 +- src/label_studio_sdk/views/__init__.py | 2 + src/label_studio_sdk/views/client.py | 312 +---- src/label_studio_sdk/views/raw_client.py | 574 ++++++++ src/label_studio_sdk/views/types/__init__.py | 2 + .../views/types/views_create_request_data.py | 6 +- .../views_create_request_data_filters.py | 8 +- ..._create_request_data_filters_items_item.py | 8 
+- .../views/types/views_update_request_data.py | 6 +- .../views_update_request_data_filters.py | 8 +- ..._update_request_data_filters_items_item.py | 8 +- src/label_studio_sdk/webhooks/__init__.py | 2 + src/label_studio_sdk/webhooks/client.py | 391 ++---- src/label_studio_sdk/webhooks/raw_client.py | 824 +++++++++++ .../webhooks/types/__init__.py | 2 + src/label_studio_sdk/workspaces/__init__.py | 2 + src/label_studio_sdk/workspaces/client.py | 296 +--- .../workspaces/members/__init__.py | 2 + .../workspaces/members/client.py | 173 +-- .../workspaces/members/raw_client.py | 290 ++++ .../workspaces/members/types/__init__.py | 2 + .../members/types/members_create_response.py | 4 +- .../types/members_list_response_item.py | 4 +- src/label_studio_sdk/workspaces/raw_client.py | 561 ++++++++ tests/__init__.py | 2 - tests/conftest.py | 18 - tests/custom/test_client.py | 2 +- tests/export_storage/__init__.py | 2 - tests/export_storage/test_azure.py | 251 ---- tests/export_storage/test_gcs.py | 251 ---- tests/export_storage/test_local.py | 225 --- tests/export_storage/test_redis.py | 261 ---- tests/export_storage/test_s3.py | 291 ---- tests/export_storage/test_s3s.py | 175 --- tests/import_storage/__init__.py | 2 - tests/import_storage/test_azure.py | 269 ---- tests/import_storage/test_gcs.py | 269 ---- tests/import_storage/test_local.py | 219 --- tests/import_storage/test_redis.py | 255 ---- tests/import_storage/test_s3.py | 319 ----- tests/import_storage/test_s3s.py | 329 ----- tests/projects/__init__.py | 2 - tests/projects/test_exports.py | 205 --- tests/projects/test_pauses.py | 154 -- tests/prompts/__init__.py | 2 - tests/prompts/test_indicators.py | 47 - tests/prompts/test_runs.py | 74 - tests/prompts/test_versions.py | 280 ---- tests/test_actions.py | 59 - tests/test_annotations.py | 464 ------ tests/test_comments.py | 154 -- tests/test_export_storage.py | 16 - tests/test_files.py | 75 - tests/test_import_storage.py | 16 - tests/test_jwt_settings.py | 44 - 
tests/test_ml.py | 229 --- tests/test_model_providers.py | 194 --- tests/test_predictions.py | 404 ------ tests/test_projects.py | 282 ---- tests/test_prompts.py | 180 --- tests/test_tasks.py | 351 ----- tests/test_tokens.py | 62 - tests/test_users.py | 226 --- tests/test_versions.py | 36 - tests/test_views.py | 175 --- tests/test_workspaces.py | 154 -- tests/utilities.py | 162 --- tests/utils/assets/models/__init__.py | 2 +- tests/utils/assets/models/circle.py | 2 +- .../assets/models/object_with_defaults.py | 1 - .../models/object_with_optional_field.py | 9 +- tests/utils/assets/models/shape.py | 6 +- tests/utils/assets/models/square.py | 2 +- .../assets/models/undiscriminated_shape.py | 1 + tests/utils/test_query_encoding.py | 2 +- tests/utils/test_serialization.py | 6 +- tests/workspaces/__init__.py | 2 - tests/workspaces/test_members.py | 39 - 367 files changed, 30523 insertions(+), 17687 deletions(-) create mode 100644 requirements.txt create mode 100644 src/label_studio_sdk/actions/raw_client.py create mode 100644 src/label_studio_sdk/annotations/raw_client.py create mode 100644 src/label_studio_sdk/comments/raw_client.py create mode 100644 src/label_studio_sdk/core/force_multipart.py create mode 100644 src/label_studio_sdk/core/http_response.py create mode 100644 src/label_studio_sdk/export_storage/azure/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/gcs/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/local/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/redis/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/s3/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/s3s/raw_client.py create mode 100644 src/label_studio_sdk/files/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/azure/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/gcs/raw_client.py create 
mode 100644 src/label_studio_sdk/import_storage/local/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/redis/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/s3/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/s3s/raw_client.py create mode 100644 src/label_studio_sdk/jwt_settings/raw_client.py create mode 100644 src/label_studio_sdk/ml/raw_client.py create mode 100644 src/label_studio_sdk/model_providers/raw_client.py create mode 100644 src/label_studio_sdk/predictions/raw_client.py create mode 100644 src/label_studio_sdk/projects/exports/raw_client.py create mode 100644 src/label_studio_sdk/projects/pauses/raw_client.py create mode 100644 src/label_studio_sdk/projects/raw_client.py create mode 100644 src/label_studio_sdk/prompts/indicators/raw_client.py create mode 100644 src/label_studio_sdk/prompts/raw_client.py create mode 100644 src/label_studio_sdk/prompts/runs/raw_client.py create mode 100644 src/label_studio_sdk/prompts/versions/raw_client.py create mode 100644 src/label_studio_sdk/tasks/raw_client.py create mode 100644 src/label_studio_sdk/tokens/raw_client.py create mode 100644 src/label_studio_sdk/users/raw_client.py create mode 100644 src/label_studio_sdk/versions/raw_client.py create mode 100644 src/label_studio_sdk/views/raw_client.py create mode 100644 src/label_studio_sdk/webhooks/raw_client.py create mode 100644 src/label_studio_sdk/workspaces/members/raw_client.py create mode 100644 src/label_studio_sdk/workspaces/raw_client.py delete mode 100644 tests/__init__.py delete mode 100644 tests/conftest.py delete mode 100644 tests/export_storage/__init__.py delete mode 100644 tests/export_storage/test_azure.py delete mode 100644 tests/export_storage/test_gcs.py delete mode 100644 tests/export_storage/test_local.py delete mode 100644 tests/export_storage/test_redis.py delete mode 100644 tests/export_storage/test_s3.py delete 
mode 100644 tests/export_storage/test_s3s.py delete mode 100644 tests/import_storage/__init__.py delete mode 100644 tests/import_storage/test_azure.py delete mode 100644 tests/import_storage/test_gcs.py delete mode 100644 tests/import_storage/test_local.py delete mode 100644 tests/import_storage/test_redis.py delete mode 100644 tests/import_storage/test_s3.py delete mode 100644 tests/import_storage/test_s3s.py delete mode 100644 tests/projects/__init__.py delete mode 100644 tests/projects/test_exports.py delete mode 100644 tests/projects/test_pauses.py delete mode 100644 tests/prompts/__init__.py delete mode 100644 tests/prompts/test_indicators.py delete mode 100644 tests/prompts/test_runs.py delete mode 100644 tests/prompts/test_versions.py delete mode 100644 tests/test_actions.py delete mode 100644 tests/test_annotations.py delete mode 100644 tests/test_comments.py delete mode 100644 tests/test_export_storage.py delete mode 100644 tests/test_files.py delete mode 100644 tests/test_import_storage.py delete mode 100644 tests/test_jwt_settings.py delete mode 100644 tests/test_ml.py delete mode 100644 tests/test_model_providers.py delete mode 100644 tests/test_predictions.py delete mode 100644 tests/test_projects.py delete mode 100644 tests/test_prompts.py delete mode 100644 tests/test_tasks.py delete mode 100644 tests/test_tokens.py delete mode 100644 tests/test_users.py delete mode 100644 tests/test_versions.py delete mode 100644 tests/test_views.py delete mode 100644 tests/test_workspaces.py delete mode 100644 tests/utilities.py delete mode 100644 tests/workspaces/__init__.py delete mode 100644 tests/workspaces/test_members.py diff --git a/.gitignore b/.gitignore index 0da665fee..d2e4ca808 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ -dist/ .mypy_cache/ +.ruff_cache/ __pycache__/ +dist/ poetry.toml -.ruff_cache/ diff --git a/.mock/definition/__package__.yml b/.mock/definition/__package__.yml index 480f0d978..2dc41342e 100644 --- 
a/.mock/definition/__package__.yml +++ b/.mock/definition/__package__.yml @@ -50,19 +50,23 @@ types: openapi: openapi/openapi.yaml Annotation: properties: - id: optional + id: + type: optional + access: read-only result: type: optional>> docs: List of annotation results for the task created_username: type: optional docs: Username string + access: read-only created_ago: type: optional docs: Time delta from creation time default: '' validation: minLength: 1 + access: read-only completed_by: optional unique_id: type: optional @@ -77,9 +81,11 @@ types: created_at: type: optional docs: Creation time + access: read-only updated_at: type: optional docs: Last updated time + access: read-only draft_created_at: type: optional docs: Draft creation time @@ -116,7 +122,9 @@ types: openapi: openapi/openapi.yaml BaseUser: properties: - id: optional + id: + type: optional + access: read-only first_name: type: optional validation: @@ -135,9 +143,15 @@ types: validation: format: email maxLength: 254 - last_activity: optional - avatar: optional - initials: optional + last_activity: + type: optional + access: read-only + avatar: + type: optional + access: read-only + initials: + type: optional + access: read-only phone: type: optional validation: @@ -151,7 +165,9 @@ types: openapi: openapi/openapi.yaml Filter: properties: - id: optional + id: + type: optional + access: read-only index: type: optional docs: To keep filter order @@ -180,7 +196,9 @@ types: openapi: openapi/openapi.yaml FilterGroup: properties: - id: optional + id: + type: optional + access: read-only filters: list conjunction: type: string @@ -192,7 +210,9 @@ types: openapi: openapi/openapi.yaml View: properties: - id: optional + id: + type: optional + access: read-only filter_group: optional data: type: optional> @@ -213,8 +233,12 @@ types: openapi: openapi/openapi.yaml FileUpload: properties: - id: optional - file: optional + id: + type: optional + access: read-only + file: + type: optional + access: read-only source: 
openapi: openapi/openapi.yaml OrganizationInvite: @@ -231,8 +255,12 @@ types: openapi: openapi/openapi.yaml LabelLink: properties: - id: optional - annotations_count: optional + id: + type: optional + access: read-only + annotations_count: + type: optional + access: read-only from_name: type: string docs: Tag name @@ -245,14 +273,20 @@ types: openapi: openapi/openapi.yaml Label: properties: - id: optional - links: optional> + id: + type: optional + access: read-only + links: + type: optional> + access: read-only created_at: type: optional docs: Time of label creation + access: read-only updated_at: type: optional docs: Time of label modification + access: read-only value: type: map docs: Label value @@ -275,12 +309,16 @@ types: type: optional docs: User who approved this label organization: integer - projects: optional> + projects: + type: optional> + access: read-only source: openapi: openapi/openapi.yaml LabelCreate: properties: - id: optional + id: + type: optional + access: read-only created_by: optional organization: optional project: integer @@ -291,9 +329,11 @@ types: created_at: type: optional docs: Time of label creation + access: read-only updated_at: type: optional docs: Time of label modification + access: read-only value: type: map docs: Label value @@ -312,7 +352,9 @@ types: approved_by: type: optional docs: User who approved this label - projects: optional> + projects: + type: optional> + access: read-only source: openapi: openapi/openapi.yaml MlBackendState: @@ -334,9 +376,13 @@ types: openapi: openapi/openapi.yaml MlBackend: properties: - id: optional + id: + type: optional + access: read-only state: optional - readable_state: optional + readable_state: + type: optional + access: read-only is_interactive: type: optional docs: >- @@ -358,7 +404,9 @@ types: type: optional docs: HTTP Basic Auth user basic_auth_pass: optional - basic_auth_pass_is_set: optional + basic_auth_pass_is_set: + type: optional + access: read-only description: type: optional 
docs: Description for the machine learning backend @@ -371,8 +419,12 @@ types: timeout: type: optional docs: Response model timeout - created_at: optional - updated_at: optional + created_at: + type: optional + access: read-only + updated_at: + type: optional + access: read-only auto_update: type: optional docs: >- @@ -383,7 +435,9 @@ types: openapi: openapi/openapi.yaml OrganizationId: properties: - id: optional + id: + type: optional + access: read-only title: type: string validation: @@ -394,12 +448,16 @@ types: validation: format: email maxLength: 254 - created_at: optional + created_at: + type: optional + access: read-only source: openapi: openapi/openapi.yaml Organization: properties: - id: optional + id: + type: optional + access: read-only title: type: string validation: @@ -409,20 +467,28 @@ types: type: optional validation: maxLength: 256 - created_at: optional - updated_at: optional + created_at: + type: optional + access: read-only + updated_at: + type: optional + access: read-only contact_info: type: optional validation: format: email maxLength: 254 created_by: optional - users: optional> + users: + type: optional> + access: read-only source: openapi: openapi/openapi.yaml UserSerializerWithProjects: properties: - id: optional + id: + type: optional + access: read-only first_name: type: optional validation: @@ -441,9 +507,15 @@ types: validation: format: email maxLength: 254 - last_activity: optional - avatar: optional - initials: optional + last_activity: + type: optional + access: read-only + avatar: + type: optional + access: read-only + initials: + type: optional + access: read-only phone: type: optional validation: @@ -453,13 +525,19 @@ types: type: optional docs: Allow sending newsletters to user date_joined: optional - created_projects: optional - contributed_to_projects: optional + created_projects: + type: optional + access: read-only + contributed_to_projects: + type: optional + access: read-only source: openapi: openapi/openapi.yaml 
OrganizationMemberUser: properties: - id: optional + id: + type: optional + access: read-only organization: type: integer docs: Organization ID @@ -468,7 +546,9 @@ types: openapi: openapi/openapi.yaml Prediction: properties: - id: optional + id: + type: optional + access: read-only result: docs: List of prediction results for the task type: list> @@ -484,6 +564,7 @@ types: default: '' validation: minLength: 1 + access: read-only score: type: optional docs: Prediction score @@ -496,8 +577,12 @@ types: mislabeling: type: optional docs: Related task mislabeling score - created_at: optional - updated_at: optional + created_at: + type: optional + access: read-only + updated_at: + type: optional + access: read-only model: type: optional docs: An ML Backend instance that created the prediction. @@ -511,7 +596,9 @@ types: UserSimple: docs: Project owner properties: - id: optional + id: + type: optional + access: read-only first_name: type: optional validation: @@ -525,7 +612,9 @@ types: validation: format: email maxLength: 254 - avatar: optional + avatar: + type: optional + access: read-only source: openapi: openapi/openapi.yaml ProjectSampling: @@ -549,7 +638,9 @@ types: openapi: openapi/openapi.yaml Project: properties: - id: optional + id: + type: optional + access: read-only title: type: optional docs: Project name. Must be between 3 and 50 characters long. 
@@ -609,7 +700,9 @@ types: type: optional docs: Whether or not the project is in the middle of being created created_by: optional - created_at: optional + created_at: + type: optional + access: read-only min_annotations_to_start_training: type: optional docs: >- @@ -618,38 +711,46 @@ types: start_training_on_annotation_update: type: optional docs: Start model training after any annotations are submitted or updated + access: read-only show_collab_predictions: type: optional docs: If set, the annotator can view model predictions num_tasks_with_annotations: type: optional docs: Tasks with annotations count + access: read-only task_number: type: optional docs: Total task number in project + access: read-only useful_annotation_number: type: optional docs: >- Useful annotation number in project not including skipped_annotations_number and ground_truth_number. Total annotations = annotation_number + skipped_annotations_number + ground_truth_number + access: read-only ground_truth_number: type: optional docs: Honeypot annotation number in project + access: read-only skipped_annotations_number: type: optional docs: Skipped by collaborators annotation number in project + access: read-only total_annotations_number: type: optional docs: >- Total annotations number in project including skipped_annotations_number and ground_truth_number. + access: read-only total_predictions_number: type: optional docs: >- Total predictions number in project including skipped_annotations_number, ground_truth_number, and useful_annotation_number. 
+ access: read-only sampling: optional show_ground_truth_first: optional show_overlap_first: optional @@ -678,12 +779,14 @@ types: parsed_label_config: type: optional> docs: JSON-formatted labeling configuration + access: read-only evaluate_predictions_automatically: type: optional docs: Retrieve and display predictions when loading a task config_has_control_tags: type: optional docs: Flag to detect is project ready for labeling + access: read-only skip_queue: optional reveal_preannotations_interactively: type: optional @@ -694,8 +797,13 @@ types: finished_task_number: type: optional docs: Finished tasks - queue_total: optional - queue_done: optional + access: read-only + queue_total: + type: optional + access: read-only + queue_done: + type: optional + access: read-only source: openapi: openapi/openapi.yaml ProjectLabelConfig: @@ -718,7 +826,9 @@ types: openapi: openapi/openapi.yaml ConvertedFormat: properties: - id: optional + id: + type: optional + access: read-only status: optional export_type: type: string @@ -745,11 +855,14 @@ types: type: optional validation: maxLength: 2048 - id: optional + id: + type: optional + access: read-only created_by: optional created_at: type: optional docs: Creation time + access: read-only finished_at: type: optional docs: Complete or fail time @@ -774,7 +887,9 @@ types: openapi: openapi/openapi.yaml ProjectImport: properties: - id: optional + id: + type: optional + access: read-only preannotated_from_fields: optional> commit_to_project: optional return_task_ids: optional @@ -788,9 +903,11 @@ types: created_at: type: optional docs: Creation time + access: read-only updated_at: type: optional docs: Updated time + access: read-only finished_at: type: optional docs: Complete or fail time @@ -818,7 +935,9 @@ types: openapi: openapi/openapi.yaml ProjectReimport: properties: - id: optional + id: + type: optional + access: read-only status: optional error: optional task_count: optional @@ -845,10 +964,13 @@ types: openapi: 
openapi/openapi.yaml AzureBlobImportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: azure + access: read-only synchronizable: type: optional default: true @@ -902,6 +1024,7 @@ types: created_at: type: optional docs: Creation time + access: read-only presign_ttl: type: optional docs: Presigned URLs TTL (in minutes) @@ -922,10 +1045,13 @@ types: openapi: openapi/openapi.yaml AzureBlobExportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: azure + access: read-only synchronizable: type: optional default: true @@ -976,6 +1102,7 @@ types: created_at: type: optional docs: Creation time + access: read-only can_delete_objects: type: optional docs: Deletion from storage enabled @@ -996,10 +1123,13 @@ types: openapi: openapi/openapi.yaml GcsExportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: gcs + access: read-only synchronizable: type: optional default: true @@ -1050,6 +1180,7 @@ types: created_at: type: optional docs: Creation time + access: read-only can_delete_objects: type: optional docs: Deletion from storage enabled @@ -1070,10 +1201,13 @@ types: openapi: openapi/openapi.yaml LocalFilesExportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: localfiles + access: read-only synchronizable: type: optional default: true @@ -1115,6 +1249,7 @@ types: created_at: type: optional docs: Creation time + access: read-only can_delete_objects: type: optional docs: Deletion from storage enabled @@ -1135,10 +1270,13 @@ types: openapi: openapi/openapi.yaml RedisExportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: redis + access: read-only synchronizable: type: optional default: true @@ -1189,6 +1327,7 @@ types: created_at: type: optional docs: Creation time + access: read-only 
can_delete_objects: type: optional docs: Deletion from storage enabled @@ -1212,10 +1351,13 @@ types: openapi: openapi/openapi.yaml S3ExportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: s3 + access: read-only synchronizable: type: optional default: true @@ -1248,6 +1390,7 @@ types: created_at: type: optional docs: Creation time + access: read-only can_delete_objects: type: optional docs: Deletion from storage enabled @@ -1298,10 +1441,13 @@ types: openapi: openapi/openapi.yaml GcsImportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: gcs + access: read-only synchronizable: type: optional default: true @@ -1355,6 +1501,7 @@ types: created_at: type: optional docs: Creation time + access: read-only presign_ttl: type: optional docs: Presigned URLs TTL (in minutes) @@ -1375,10 +1522,13 @@ types: openapi: openapi/openapi.yaml LocalFilesImportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: localfiles + access: read-only synchronizable: type: optional default: true @@ -1420,6 +1570,7 @@ types: created_at: type: optional docs: Creation time + access: read-only project: type: integer docs: A unique integer value identifying this project. 
@@ -1437,10 +1588,13 @@ types: openapi: openapi/openapi.yaml RedisImportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: redis + access: read-only synchronizable: type: optional default: true @@ -1491,6 +1645,7 @@ types: created_at: type: optional docs: Creation time + access: read-only db: type: optional docs: Server Database @@ -1511,10 +1666,13 @@ types: openapi: openapi/openapi.yaml S3ImportStorage: properties: - id: optional + id: + type: optional + access: read-only type: type: optional default: s3 + access: read-only synchronizable: type: optional default: true @@ -1550,6 +1708,7 @@ types: created_at: type: optional docs: Creation time + access: read-only bucket: type: optional docs: S3 bucket name @@ -1615,7 +1774,9 @@ types: inline: true BaseTask: properties: - id: optional + id: + type: optional + access: read-only data: type: map docs: >- @@ -1631,9 +1792,11 @@ types: created_at: type: optional docs: Time a task was created + access: read-only updated_at: type: optional docs: Last time a task was updated + access: read-only is_labeled: type: optional docs: >- @@ -1697,19 +1860,23 @@ types: openapi: openapi/openapi.yaml AnnotationsDmField: properties: - id: optional + id: + type: optional + access: read-only result: type: optional>> docs: List of annotation results for the task created_username: type: optional docs: Username string + access: read-only created_ago: type: optional docs: Time delta from creation time default: '' validation: minLength: 1 + access: read-only completed_by: type: optional> docs: User details who completed this annotation. 
@@ -1726,9 +1893,11 @@ types: created_at: type: optional docs: Creation time + access: read-only updated_at: type: optional docs: Last updated time + access: read-only draft_created_at: type: optional docs: Draft creation time @@ -1807,34 +1976,54 @@ types: inline: true DataManagerTaskSerializer: properties: - id: optional + id: + type: optional + access: read-only predictions: type: optional> docs: Predictions for this task - annotations: optional> + access: read-only + annotations: + type: optional> + access: read-only drafts: type: optional> docs: Drafts for this task + access: read-only annotators: type: optional> docs: Annotators who annotated this task + access: read-only inner_id: optional cancelled_annotations: optional total_annotations: optional total_predictions: optional completed_at: optional - annotations_results: optional - predictions_results: optional + annotations_results: + type: optional + access: read-only + predictions_results: + type: optional + access: read-only predictions_score: optional - file_upload: optional - storage_filename: optional - annotations_ids: optional - predictions_model_versions: optional + file_upload: + type: optional + access: read-only + storage_filename: + type: optional + access: read-only + annotations_ids: + type: optional + access: read-only + predictions_model_versions: + type: optional + access: read-only avg_lead_time: optional draft_exists: optional updated_by: type: optional>> docs: User IDs who updated this task + access: read-only data: type: map docs: >- @@ -1850,9 +2039,11 @@ types: created_at: type: optional docs: Time a task was created + access: read-only updated_at: type: optional docs: Last time a task was updated + access: read-only is_labeled: type: optional docs: >- @@ -1897,8 +2088,12 @@ types: openapi: openapi/openapi.yaml Webhook: properties: - id: optional - organization: optional + id: + type: optional + access: read-only + organization: + type: optional + access: read-only project: optional 
url: type: string @@ -1923,9 +2118,11 @@ types: created_at: type: optional docs: Creation time + access: read-only updated_at: type: optional docs: Last update time + access: read-only source: openapi: openapi/openapi.yaml WebhookSerializerForUpdateActionsItem: @@ -1947,9 +2144,15 @@ types: openapi: openapi/openapi.yaml WebhookSerializerForUpdate: properties: - id: optional - organization: optional - project: optional + id: + type: optional + access: read-only + organization: + type: optional + access: read-only + project: + type: optional + access: read-only url: type: string docs: URL of webhook @@ -1973,9 +2176,11 @@ types: created_at: type: optional docs: Creation time + access: read-only updated_at: type: optional docs: Last update time + access: read-only source: openapi: openapi/openapi.yaml TaskAnnotatorsItem: @@ -2081,6 +2286,7 @@ types: id: type: optional docs: Unique ID of the workspace + access: read-only title: type: optional docs: Workspace title @@ -2099,12 +2305,15 @@ types: created_at: type: optional docs: Creation time of the workspace + access: read-only updated_at: type: optional docs: Last updated time of the workspace + access: read-only created_by: type: optional docs: User ID of the workspace creator + access: read-only color: type: optional docs: Workspace color @@ -2122,7 +2331,9 @@ types: openapi: openapi/openapi.yaml S3SImportStorage: properties: - id: optional + id: + type: optional + access: read-only synchronizable: type: optional default: true @@ -2158,6 +2369,7 @@ types: created_at: type: optional docs: Creation time + access: read-only bucket: type: optional docs: S3 bucket name @@ -2195,7 +2407,9 @@ types: openapi: openapi/openapi.yaml S3SExportStorage: properties: - id: optional + id: + type: optional + access: read-only title: type: optional docs: Cloud storage title @@ -2207,6 +2421,7 @@ types: created_at: type: optional docs: Creation time + access: read-only bucket: type: optional docs: S3 bucket name @@ -2238,9 +2453,15 @@ 
types: organization: type: integer docs: Organization ID - contributed_projects_count: optional - annotations_count: optional - created_at: optional + contributed_projects_count: + type: optional + access: read-only + annotations_count: + type: optional + access: read-only + created_at: + type: optional + access: read-only source: openapi: openapi/openapi.yaml PausePausedBy: @@ -2260,6 +2481,7 @@ types: paused_by: type: optional docs: User who created the pause + access: read-only reason: string verbose_reason: optional deleted_by: optional @@ -2346,12 +2568,15 @@ types: created_by: type: optional docs: User ID of the creator of the prompt + access: read-only created_at: type: optional docs: Date and time the prompt was created + access: read-only updated_at: type: optional docs: Date and time the prompt was last updated + access: read-only organization: type: optional docs: Organization ID of the prompt @@ -2409,8 +2634,12 @@ types: provider: optional provider_model_id: optional created_by: optional - created_at: optional - updated_at: optional + created_at: + type: optional + access: read-only + updated_at: + type: optional + access: read-only organization: optional source: openapi: openapi/openapi.yaml @@ -2461,10 +2690,12 @@ types: refinement_job_id: type: optional docs: Unique identifier for the refinement job + access: read-only refinement_status: type: optional docs: Status of the refinement job default: Pending + access: read-only total_cost: type: optional docs: Total cost of the refinement job (in USD) @@ -2517,10 +2748,18 @@ types: type: InferenceRunProjectSubset status: optional job_id: optional - created_at: optional - triggered_at: optional - predictions_updated_at: optional - completed_at: optional + created_at: + type: optional + access: read-only + triggered_at: + type: optional + access: read-only + predictions_updated_at: + type: optional + access: read-only + completed_at: + type: optional + access: read-only source: openapi: 
openapi/openapi.yaml KeyIndicatorsItemAdditionalKpisItem: @@ -2635,15 +2874,22 @@ types: endpoint: optional scope: optional organization: optional - created_by: optional - created_at: optional - updated_at: optional + created_by: + type: optional + access: read-only + created_at: + type: optional + access: read-only + updated_at: + type: optional + access: read-only is_internal: type: optional docs: >- Whether the model provider connection is internal, not visible to the user. default: false + access: read-only budget_limit: type: optional docs: Budget limit for the model provider connection (null if unlimited) @@ -2790,11 +3036,14 @@ types: type: optional validation: maxLength: 2048 - id: optional + id: + type: optional + access: read-only created_by: optional created_at: type: optional docs: Creation time + access: read-only finished_at: type: optional docs: Complete or fail time diff --git a/.mock/definition/annotations.yml b/.mock/definition/annotations.yml index 834b256a5..301ecb4ce 100644 --- a/.mock/definition/annotations.yml +++ b/.mock/definition/annotations.yml @@ -28,6 +28,7 @@ service: response: docs: Retrieved annotation type: root.Annotation + status-code: 200 examples: - name: response path-parameters: @@ -161,6 +162,7 @@ service: response: docs: Updated annotation type: root.Annotation + status-code: 200 examples: - name: response path-parameters: @@ -245,6 +247,7 @@ service: response: docs: Annotation type: list + status-code: 200 examples: - name: response path-parameters: @@ -371,6 +374,7 @@ service: response: docs: Created annotation type: root.Annotation + status-code: 201 examples: - name: response path-parameters: @@ -455,6 +459,7 @@ service: response: docs: Annotations created successfully type: list + status-code: 201 examples: - request: {} response: diff --git a/.mock/definition/comments.yml b/.mock/definition/comments.yml index 1f9ffbdba..89c07315d 100644 --- a/.mock/definition/comments.yml +++ b/.mock/definition/comments.yml @@ -29,6 
+29,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -66,6 +67,7 @@ service: response: docs: '' type: root.Comment + status-code: 201 examples: - request: {} response: @@ -99,6 +101,7 @@ service: response: docs: '' type: root.Comment + status-code: 200 examples: - path-parameters: id: 1 @@ -161,6 +164,7 @@ service: response: docs: '' type: root.Comment + status-code: 200 examples: - path-parameters: id: 1 diff --git a/.mock/definition/dataManager.yml b/.mock/definition/dataManager.yml index d17a3a030..df14e2b21 100644 --- a/.mock/definition/dataManager.yml +++ b/.mock/definition/dataManager.yml @@ -24,6 +24,7 @@ service: response: docs: Columns retrieved successfully type: unknown + status-code: 200 errors: - root.BadRequestError - root.NotFoundError @@ -94,6 +95,7 @@ service: response: docs: '' type: ApiDmViewsUpdateResponse + status-code: 200 examples: - path-parameters: id: id diff --git a/.mock/definition/exportStorage.yml b/.mock/definition/exportStorage.yml index fbe231718..8bb3bac3b 100644 --- a/.mock/definition/exportStorage.yml +++ b/.mock/definition/exportStorage.yml @@ -25,6 +25,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: diff --git a/.mock/definition/exportStorage/azure.yml b/.mock/definition/exportStorage/azure.yml index 9ee3978b3..aa5b8e125 100644 --- a/.mock/definition/exportStorage/azure.yml +++ b/.mock/definition/exportStorage/azure.yml @@ -35,6 +35,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -115,6 +116,7 @@ service: response: docs: '' type: AzureCreateResponse + status-code: 201 examples: - request: {} response: @@ -201,6 +203,7 @@ service: response: docs: '' type: root.AzureBlobExportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -310,6 +313,7 @@ service: response: docs: '' type: AzureUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -355,6 +359,7 @@ service: 
response: docs: '' type: root.AzureBlobExportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/exportStorage/gcs.yml b/.mock/definition/exportStorage/gcs.yml index 5154db589..e2b4df796 100644 --- a/.mock/definition/exportStorage/gcs.yml +++ b/.mock/definition/exportStorage/gcs.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -115,6 +116,7 @@ service: response: docs: '' type: GcsCreateResponse + status-code: 201 examples: - request: {} response: @@ -203,6 +205,7 @@ service: response: docs: '' type: root.GcsExportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -314,6 +317,7 @@ service: response: docs: '' type: GcsUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -358,6 +362,7 @@ service: response: docs: '' type: root.GcsExportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/exportStorage/local.yml b/.mock/definition/exportStorage/local.yml index 9f203259e..2b2b5c2e4 100644 --- a/.mock/definition/exportStorage/local.yml +++ b/.mock/definition/exportStorage/local.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -108,6 +109,7 @@ service: response: docs: '' type: LocalCreateResponse + status-code: 201 examples: - request: {} response: @@ -190,6 +192,7 @@ service: response: docs: '' type: root.LocalFilesExportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -294,6 +297,7 @@ service: response: docs: '' type: LocalUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -337,6 +341,7 @@ service: response: docs: '' type: root.LocalFilesExportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/exportStorage/redis.yml b/.mock/definition/exportStorage/redis.yml index e490dc336..0ef79a087 100644 --- a/.mock/definition/exportStorage/redis.yml +++ 
b/.mock/definition/exportStorage/redis.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -116,6 +117,7 @@ service: response: docs: '' type: RedisCreateResponse + status-code: 201 examples: - request: {} response: @@ -205,6 +207,7 @@ service: response: docs: '' type: root.RedisExportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -317,6 +320,7 @@ service: response: docs: '' type: RedisUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -363,6 +367,7 @@ service: response: docs: '' type: root.RedisExportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/exportStorage/s3.yml b/.mock/definition/exportStorage/s3.yml index 7ed022228..c04f30554 100644 --- a/.mock/definition/exportStorage/s3.yml +++ b/.mock/definition/exportStorage/s3.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -128,6 +129,7 @@ service: response: docs: '' type: S3CreateResponse + status-code: 201 examples: - request: {} response: @@ -229,6 +231,7 @@ service: response: docs: '' type: root.S3ExportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -353,6 +356,7 @@ service: response: docs: '' type: S3UpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -401,6 +405,7 @@ service: response: docs: '' type: root.S3ExportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/exportStorage/s3S.yml b/.mock/definition/exportStorage/s3S.yml index b3d5cb030..b6894f6ff 100644 --- a/.mock/definition/exportStorage/s3S.yml +++ b/.mock/definition/exportStorage/s3S.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -104,6 +105,7 @@ service: response: docs: '' type: root.S3SExportStorage + status-code: 201 examples: - request: {} response: @@ -140,6 +142,7 @@ service: response: docs: '' type: 
root.S3SExportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -233,6 +236,7 @@ service: response: docs: '' type: root.S3SExportStorage + status-code: 200 examples: - path-parameters: id: 1 diff --git a/.mock/definition/files.yml b/.mock/definition/files.yml index 1d1c1eac6..b76af3f86 100644 --- a/.mock/definition/files.yml +++ b/.mock/definition/files.yml @@ -21,6 +21,7 @@ service: response: docs: '' type: root.FileUpload + status-code: 200 examples: - path-parameters: id: 1 @@ -81,6 +82,7 @@ service: response: docs: '' type: root.FileUpload + status-code: 200 examples: - path-parameters: id: 1 @@ -125,6 +127,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - path-parameters: id: 1 diff --git a/.mock/definition/importStorage.yml b/.mock/definition/importStorage.yml index 6a7663a25..44461f808 100644 --- a/.mock/definition/importStorage.yml +++ b/.mock/definition/importStorage.yml @@ -25,6 +25,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: diff --git a/.mock/definition/importStorage/azure.yml b/.mock/definition/importStorage/azure.yml index ed30150b5..a19ff6b3f 100644 --- a/.mock/definition/importStorage/azure.yml +++ b/.mock/definition/importStorage/azure.yml @@ -35,6 +35,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -138,6 +139,7 @@ service: response: docs: '' type: AzureCreateResponse + status-code: 201 examples: - request: {} response: @@ -244,6 +246,7 @@ service: response: docs: '' type: root.AzureBlobImportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -377,6 +380,7 @@ service: response: docs: '' type: AzureUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -427,6 +431,7 @@ service: response: docs: '' type: root.AzureBlobImportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/importStorage/gcs.yml b/.mock/definition/importStorage/gcs.yml index 
0db24201b..97b8f6bf2 100644 --- a/.mock/definition/importStorage/gcs.yml +++ b/.mock/definition/importStorage/gcs.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -140,6 +141,7 @@ service: response: docs: '' type: GcsCreateResponse + status-code: 201 examples: - request: {} response: @@ -249,6 +251,7 @@ service: response: docs: '' type: root.GcsImportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -385,6 +388,7 @@ service: response: docs: '' type: GcsUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -434,6 +438,7 @@ service: response: docs: '' type: root.GcsImportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/importStorage/local.yml b/.mock/definition/importStorage/local.yml index cc6d03c47..87a745fa0 100644 --- a/.mock/definition/importStorage/local.yml +++ b/.mock/definition/importStorage/local.yml @@ -36,6 +36,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -109,6 +110,7 @@ service: response: docs: '' type: LocalCreateResponse + status-code: 201 examples: - request: {} response: @@ -191,6 +193,7 @@ service: response: docs: '' type: root.LocalFilesImportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -300,6 +303,7 @@ service: response: docs: '' type: LocalUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -345,6 +349,7 @@ service: response: docs: '' type: root.LocalFilesImportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/importStorage/redis.yml b/.mock/definition/importStorage/redis.yml index 5289eb04a..ac9482785 100644 --- a/.mock/definition/importStorage/redis.yml +++ b/.mock/definition/importStorage/redis.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -122,6 +123,7 @@ service: response: docs: '' type: RedisCreateResponse + 
status-code: 201 examples: - request: {} response: @@ -218,6 +220,7 @@ service: response: docs: '' type: root.RedisImportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -342,6 +345,7 @@ service: response: docs: '' type: RedisUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -390,6 +394,7 @@ service: response: docs: '' type: root.RedisImportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/importStorage/s3.yml b/.mock/definition/importStorage/s3.yml index edd6ccb15..8586daed4 100644 --- a/.mock/definition/importStorage/s3.yml +++ b/.mock/definition/importStorage/s3.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -156,6 +157,7 @@ service: response: docs: '' type: S3CreateResponse + status-code: 201 examples: - request: {} response: @@ -282,6 +284,7 @@ service: response: docs: '' type: root.S3ImportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -435,6 +438,7 @@ service: response: docs: '' type: S3UpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -489,6 +493,7 @@ service: response: docs: '' type: root.S3ImportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/importStorage/s3S.yml b/.mock/definition/importStorage/s3S.yml index 831ab6002..20203b518 100644 --- a/.mock/definition/importStorage/s3S.yml +++ b/.mock/definition/importStorage/s3S.yml @@ -34,6 +34,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -147,6 +148,7 @@ service: response: docs: '' type: root.S3SImportStorage + status-code: 201 examples: - request: {} response: @@ -196,6 +198,7 @@ service: response: docs: '' type: root.S3SImportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -340,6 +343,7 @@ service: response: docs: '' type: root.S3SImportStorage + status-code: 200 examples: - path-parameters: id: 1 @@ -463,6 +467,7 @@ 
service: response: docs: '' type: root.S3SImportStorage + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/jwtSettings.yml b/.mock/definition/jwtSettings.yml index 6b6649ae0..b4db10a3a 100644 --- a/.mock/definition/jwtSettings.yml +++ b/.mock/definition/jwtSettings.yml @@ -16,6 +16,7 @@ service: response: docs: JWT settings retrieved successfully type: root.JwtSettingsResponse + status-code: 200 examples: - response: body: @@ -39,6 +40,7 @@ service: response: docs: JWT settings updated successfully type: root.JwtSettingsResponse + status-code: 200 examples: - request: api_tokens_enabled: true diff --git a/.mock/definition/labels.yml b/.mock/definition/labels.yml index 1154551c8..51509970d 100644 --- a/.mock/definition/labels.yml +++ b/.mock/definition/labels.yml @@ -38,6 +38,7 @@ service: response: docs: '' type: LabelsListResponse + status-code: 200 examples: - response: body: @@ -76,6 +77,7 @@ service: response: docs: '' type: list + status-code: 201 examples: - request: - project: 1 @@ -117,6 +119,7 @@ service: response: docs: '' type: root.Label + status-code: 200 examples: - path-parameters: id: id @@ -174,6 +177,7 @@ service: response: docs: '' type: root.Label + status-code: 200 examples: - path-parameters: id: id diff --git a/.mock/definition/ml.yml b/.mock/definition/ml.yml index c811d2329..b9b33b3b3 100644 --- a/.mock/definition/ml.yml +++ b/.mock/definition/ml.yml @@ -31,6 +31,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -118,6 +119,7 @@ service: response: docs: '' type: MlCreateResponse + status-code: 201 examples: - request: {} response: @@ -158,6 +160,7 @@ service: response: docs: '' type: root.MlBackend + status-code: 200 examples: - path-parameters: id: 1 @@ -267,6 +270,7 @@ service: response: docs: '' type: MlUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 diff --git a/.mock/definition/modelProviders.yml b/.mock/definition/modelProviders.yml index 
ef8f17e2f..c83c61720 100644 --- a/.mock/definition/modelProviders.yml +++ b/.mock/definition/modelProviders.yml @@ -17,6 +17,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -52,6 +53,7 @@ service: response: docs: '' type: root.ModelProviderConnection + status-code: 201 examples: - request: provider: OpenAI @@ -90,6 +92,7 @@ service: response: docs: '' type: root.ModelProviderConnection + status-code: 200 examples: - path-parameters: pk: 1 @@ -149,6 +152,7 @@ service: response: docs: '' type: root.ModelProviderConnection + status-code: 200 examples: - path-parameters: pk: 1 diff --git a/.mock/definition/organizations.yml b/.mock/definition/organizations.yml index e106e87b6..23120916c 100644 --- a/.mock/definition/organizations.yml +++ b/.mock/definition/organizations.yml @@ -17,6 +17,7 @@ service: response: docs: '' type: root.OrganizationInvite + status-code: 200 examples: - response: body: @@ -35,6 +36,7 @@ service: response: docs: '' type: root.OrganizationInvite + status-code: 200 examples: - response: body: @@ -54,6 +56,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -76,6 +79,7 @@ service: response: docs: '' type: root.Organization + status-code: 200 examples: - path-parameters: id: 1 @@ -108,6 +112,7 @@ service: response: docs: '' type: root.Organization + status-code: 200 examples: - path-parameters: id: 1 diff --git a/.mock/definition/organizations/members.yml b/.mock/definition/organizations/members.yml index c0ceeaf39..ca343f980 100644 --- a/.mock/definition/organizations/members.yml +++ b/.mock/definition/organizations/members.yml @@ -44,6 +44,7 @@ service: response: docs: '' type: MembersListResponse + status-code: 200 examples: - path-parameters: id: id @@ -75,6 +76,7 @@ service: response: docs: '' type: root.OrganizationMember + status-code: 200 examples: - path-parameters: id: id diff --git a/.mock/definition/predictions.yml b/.mock/definition/predictions.yml 
index 5af5dbe0c..46474065f 100644 --- a/.mock/definition/predictions.yml +++ b/.mock/definition/predictions.yml @@ -43,6 +43,7 @@ service: response: docs: Predictions list type: list + status-code: 200 examples: - name: response response: @@ -146,6 +147,7 @@ service: response: docs: Created prediction type: root.Prediction + status-code: 201 examples: - name: response request: @@ -224,6 +226,7 @@ service: response: docs: Prediction details type: root.Prediction + status-code: 200 examples: - name: response path-parameters: @@ -303,6 +306,7 @@ service: response: docs: Updated prediction type: root.Prediction + status-code: 200 examples: - name: response path-parameters: @@ -429,6 +433,7 @@ service: response: docs: Updated prediction type: root.Prediction + status-code: 200 examples: - name: response path-parameters: diff --git a/.mock/definition/projects.yml b/.mock/definition/projects.yml index 0a7e736c9..4c83362f7 100644 --- a/.mock/definition/projects.yml +++ b/.mock/definition/projects.yml @@ -224,6 +224,7 @@ service: response: docs: '' type: ProjectsListResponse + status-code: 200 examples: - response: body: @@ -378,6 +379,7 @@ service: response: docs: '' type: ProjectsCreateResponse + status-code: 201 examples: - request: {} response: @@ -438,6 +440,7 @@ service: response: docs: Project information type: root.Project + status-code: 200 examples: - name: response path-parameters: @@ -645,6 +648,7 @@ service: response: docs: '' type: ProjectsUpdateResponse + status-code: 200 examples: - path-parameters: id: 1 @@ -811,6 +815,7 @@ service: response: docs: Tasks successfully imported type: ProjectsImportTasksResponse + status-code: 201 errors: - root.BadRequestError examples: @@ -849,6 +854,7 @@ service: response: docs: '' type: root.ProjectReimport + status-code: 200 examples: - path-parameters: id: 1 @@ -940,6 +946,7 @@ service: response: docs: '' type: root.ProjectLabelConfig + status-code: 201 examples: - path-parameters: id: 1 diff --git 
a/.mock/definition/projects/exports.yml b/.mock/definition/projects/exports.yml index a2c3a588f..e3ab85cf6 100644 --- a/.mock/definition/projects/exports.yml +++ b/.mock/definition/projects/exports.yml @@ -87,6 +87,7 @@ service: response: docs: Exported data in binary format type: file + status-code: 200 audiences: - public list_formats: @@ -114,6 +115,7 @@ service: response: docs: Export formats type: list + status-code: 200 examples: - path-parameters: id: 1 @@ -152,6 +154,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - path-parameters: project_id: 1 @@ -210,6 +213,7 @@ service: response: docs: '' type: root.ExportSnapshot + status-code: 201 examples: - path-parameters: project_id: 1 @@ -297,6 +301,7 @@ service: response: docs: Exported data in binary format type: file + status-code: 200 audiences: - public get: @@ -329,6 +334,7 @@ service: response: docs: '' type: root.Export + status-code: 200 examples: - path-parameters: project_id: 1 @@ -431,6 +437,7 @@ service: response: docs: '' type: ExportsConvertResponse + status-code: 201 examples: - path-parameters: project_id: 1 diff --git a/.mock/definition/projects/labels.yml b/.mock/definition/projects/labels.yml index 9107f8e3b..1d54b3114 100644 --- a/.mock/definition/projects/labels.yml +++ b/.mock/definition/projects/labels.yml @@ -36,6 +36,7 @@ service: response: docs: '' type: LabelsListResponse + status-code: 200 examples: - response: body: @@ -66,6 +67,7 @@ service: response: docs: '' type: root.LabelLink + status-code: 201 examples: - request: from_name: from_name @@ -93,6 +95,7 @@ service: response: docs: '' type: root.LabelLink + status-code: 200 examples: - path-parameters: id: id @@ -148,6 +151,7 @@ service: response: docs: '' type: root.LabelLink + status-code: 200 examples: - path-parameters: id: id diff --git a/.mock/definition/projects/pauses.yml b/.mock/definition/projects/pauses.yml index 9c9119f4a..d3d7d982f 100644 --- a/.mock/definition/projects/pauses.yml +++ 
b/.mock/definition/projects/pauses.yml @@ -28,6 +28,7 @@ service: response: docs: Successfully retrieved a list of pauses type: list + status-code: 200 examples: - path-parameters: project_pk: 1 @@ -71,6 +72,7 @@ service: response: docs: Successfully created a pause type: root.Pause + status-code: 201 examples: - path-parameters: project_pk: 1 @@ -112,6 +114,7 @@ service: response: docs: Successfully retrieved the pause type: root.Pause + status-code: 200 examples: - path-parameters: project_pk: 1 @@ -184,6 +187,7 @@ service: response: docs: Successfully updated the pause (partial) type: root.Pause + status-code: 200 examples: - path-parameters: project_pk: 1 diff --git a/.mock/definition/prompts.yml b/.mock/definition/prompts.yml index e2a4aa6d7..4dfce7746 100644 --- a/.mock/definition/prompts.yml +++ b/.mock/definition/prompts.yml @@ -16,6 +16,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -49,6 +50,7 @@ service: response: docs: '' type: root.Prompt + status-code: 201 examples: - request: title: title @@ -89,6 +91,7 @@ service: response: docs: '' type: root.Prompt + status-code: 200 examples: - path-parameters: id: 1 @@ -146,6 +149,7 @@ service: response: docs: '' type: root.Prompt + status-code: 200 examples: - path-parameters: id: 1 @@ -193,6 +197,7 @@ service: response: docs: '' type: PromptsBatchPredictionsResponse + status-code: 201 examples: - request: {} response: @@ -222,6 +227,7 @@ service: response: docs: '' type: PromptsBatchFailedPredictionsResponse + status-code: 201 examples: - request: {} response: diff --git a/.mock/definition/prompts/indicators.yml b/.mock/definition/prompts/indicators.yml index 7c1087ab6..a6ac4ffa3 100644 --- a/.mock/definition/prompts/indicators.yml +++ b/.mock/definition/prompts/indicators.yml @@ -20,6 +20,7 @@ service: response: docs: '' type: root.KeyIndicators + status-code: 200 examples: - path-parameters: pk: 1 @@ -54,6 +55,7 @@ service: response: docs: '' type: 
root.KeyIndicatorValue + status-code: 200 examples: - path-parameters: indicator_key: indicator_key diff --git a/.mock/definition/prompts/runs.yml b/.mock/definition/prompts/runs.yml index aac1b6f9c..29ac2bd52 100644 --- a/.mock/definition/prompts/runs.yml +++ b/.mock/definition/prompts/runs.yml @@ -42,6 +42,7 @@ service: response: docs: Success type: root.InferenceRun + status-code: 200 examples: - path-parameters: id: 1 @@ -86,6 +87,7 @@ service: response: docs: '' type: root.InferenceRun + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/definition/prompts/versions.yml b/.mock/definition/prompts/versions.yml index dfef9de0a..c9d2b0886 100644 --- a/.mock/definition/prompts/versions.yml +++ b/.mock/definition/prompts/versions.yml @@ -20,6 +20,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - path-parameters: id: 1 @@ -56,6 +57,7 @@ service: response: docs: '' type: root.PromptVersion + status-code: 201 examples: - path-parameters: id: 1 @@ -93,6 +95,7 @@ service: response: docs: '' type: root.PromptVersion + status-code: 200 examples: - path-parameters: id: 1 @@ -155,6 +158,7 @@ service: response: docs: '' type: root.PromptVersion + status-code: 200 examples: - path-parameters: id: 1 @@ -207,6 +211,7 @@ service: response: docs: '' type: root.InferenceRunCostEstimate + status-code: 200 examples: - path-parameters: prompt_id: 1 @@ -251,6 +256,7 @@ service: response: docs: '' type: root.RefinedPromptResponse + status-code: 200 examples: - path-parameters: prompt_id: 1 @@ -316,6 +322,7 @@ service: response: docs: '' type: root.RefinedPromptResponse + status-code: 201 examples: - path-parameters: prompt_id: 1 diff --git a/.mock/definition/tasks.yml b/.mock/definition/tasks.yml index 61ac267e3..c7eaa2e07 100644 --- a/.mock/definition/tasks.yml +++ b/.mock/definition/tasks.yml @@ -36,6 +36,7 @@ service: response: docs: '' type: root.ProjectImport + status-code: 200 examples: - path-parameters: id: 1 @@ -182,6 +183,7 @@ 
service: response: docs: List of Tasks type: TasksListResponse + status-code: 200 examples: - name: response response: @@ -284,6 +286,7 @@ service: response: docs: Created task type: root.BaseTask + status-code: 201 examples: - name: response request: @@ -340,6 +343,7 @@ service: response: docs: Task type: root.DataManagerTaskSerializer + status-code: 200 examples: - name: response path-parameters: @@ -394,15 +398,11 @@ service: cancelled_annotations: 0 total_annotations: 0 total_predictions: 0 - completed_at: '2024-01-15T09:30:00Z' annotations_results: '' predictions_results: '' - predictions_score: 1.1 file_upload: 6b25fc23-some_3.mp4 - storage_filename: storage_filename annotations_ids: '' predictions_model_versions: '' - avg_lead_time: 1.1 draft_exists: false updated_by: - key: value @@ -416,7 +416,6 @@ service: overlap: 1 comment_count: 0 unresolved_comment_count: 0 - last_comment_updated_at: '2024-01-15T09:30:00Z' project: 1 comment_authors: - 1 @@ -482,6 +481,7 @@ service: response: docs: Updated task type: root.BaseTask + status-code: 200 examples: - name: response path-parameters: diff --git a/.mock/definition/tokens.yml b/.mock/definition/tokens.yml index a58b8a902..35705873d 100644 --- a/.mock/definition/tokens.yml +++ b/.mock/definition/tokens.yml @@ -40,6 +40,7 @@ service: response: docs: List of API tokens retrieved successfully type: list + status-code: 200 examples: - response: body: @@ -60,6 +61,7 @@ service: response: docs: Token created successfully type: root.ApiTokenResponse + status-code: 200 examples: - response: body: @@ -88,6 +90,7 @@ service: response: docs: New access token created successfully type: root.AccessTokenResponse + status-code: 200 errors: - root.UnauthorizedError examples: @@ -118,6 +121,7 @@ service: response: docs: Refresh token successfully rotated type: root.RotateTokenResponse + status-code: 200 errors: - root.BadRequestError examples: diff --git a/.mock/definition/users.yml b/.mock/definition/users.yml index 
b89f4cbf5..0b8345c87 100644 --- a/.mock/definition/users.yml +++ b/.mock/definition/users.yml @@ -32,6 +32,7 @@ service: response: docs: User token response type: UsersResetTokenResponse + status-code: 201 examples: - response: body: @@ -53,6 +54,7 @@ service: response: docs: User token response type: UsersGetTokenResponse + status-code: 200 examples: - response: body: @@ -72,6 +74,7 @@ service: response: docs: '' type: root.BaseUser + status-code: 200 examples: - response: body: @@ -102,6 +105,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -164,6 +168,7 @@ service: response: docs: '' type: root.BaseUser + status-code: 201 examples: - request: {} response: @@ -202,6 +207,7 @@ service: response: docs: '' type: root.BaseUser + status-code: 200 examples: - path-parameters: id: 1 @@ -304,6 +310,7 @@ service: response: docs: '' type: root.BaseUser + status-code: 200 examples: - path-parameters: id: 1 @@ -343,6 +350,7 @@ service: response: docs: Successfully retrieved product tour type: root.ProductTour + status-code: 200 examples: - query-parameters: name: name @@ -383,6 +391,7 @@ service: response: docs: Successfully updated product tour type: root.ProductTour + status-code: 200 audiences: - internal source: diff --git a/.mock/definition/versions.yml b/.mock/definition/versions.yml index 2f052dcb5..6c8b533b9 100644 --- a/.mock/definition/versions.yml +++ b/.mock/definition/versions.yml @@ -53,6 +53,7 @@ service: response: docs: '' type: VersionsGetResponse + status-code: 200 examples: - response: body: diff --git a/.mock/definition/views.yml b/.mock/definition/views.yml index 275a465f5..71676bdc5 100644 --- a/.mock/definition/views.yml +++ b/.mock/definition/views.yml @@ -30,6 +30,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -82,6 +83,7 @@ service: response: docs: '' type: root.View + status-code: 201 examples: - request: {} response: @@ -149,6 +151,7 @@ service: response: 
docs: '' type: root.View + status-code: 200 examples: - path-parameters: id: id @@ -221,6 +224,7 @@ service: response: docs: '' type: root.View + status-code: 200 examples: - path-parameters: id: id diff --git a/.mock/definition/webhooks.yml b/.mock/definition/webhooks.yml index a2eddd057..a550c4a8a 100644 --- a/.mock/definition/webhooks.yml +++ b/.mock/definition/webhooks.yml @@ -33,6 +33,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -82,6 +83,7 @@ service: response: docs: '' type: root.Webhook + status-code: 201 examples: - request: url: url @@ -147,6 +149,7 @@ service: response: docs: '' type: root.Webhook + status-code: 200 examples: - path-parameters: id: 1 @@ -209,6 +212,7 @@ service: response: docs: '' type: root.WebhookSerializerForUpdate + status-code: 200 examples: - path-parameters: id: 1 @@ -308,6 +312,7 @@ service: response: docs: '' type: root.WebhookSerializerForUpdate + status-code: 200 examples: - path-parameters: id: 1 diff --git a/.mock/definition/workspaces.yml b/.mock/definition/workspaces.yml index 5dc022251..39fe64b06 100644 --- a/.mock/definition/workspaces.yml +++ b/.mock/definition/workspaces.yml @@ -26,6 +26,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - response: body: @@ -87,6 +88,7 @@ service: response: docs: '' type: root.Workspace + status-code: 201 examples: - request: {} response: @@ -121,6 +123,7 @@ service: response: docs: '' type: root.Workspace + status-code: 200 examples: - path-parameters: id: 1 @@ -200,6 +203,7 @@ service: response: docs: '' type: root.Workspace + status-code: 200 examples: - path-parameters: id: 1 diff --git a/.mock/definition/workspaces/members.yml b/.mock/definition/workspaces/members.yml index cb269579f..c073be6ed 100644 --- a/.mock/definition/workspaces/members.yml +++ b/.mock/definition/workspaces/members.yml @@ -36,6 +36,7 @@ service: response: docs: '' type: list + status-code: 200 examples: - path-parameters: id: 1 @@ -71,6 
+72,7 @@ service: response: docs: '' type: MembersCreateResponse + status-code: 201 examples: - path-parameters: id: 1 diff --git a/.mock/fern.config.json b/.mock/fern.config.json index b7c6686cb..b26fef150 100644 --- a/.mock/fern.config.json +++ b/.mock/fern.config.json @@ -1,4 +1,4 @@ { "organization" : "humansignal-org", - "version" : "0.51.7" + "version" : "0.62.4" } \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 3b78f4301..c6f1d19d3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -331,27 +331,30 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.2.2" +version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] [[package]] name = "faker" -version = "37.1.0" +version = "37.3.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.9" files = [ - {file = "faker-37.1.0-py3-none-any.whl", hash = "sha256:dc2f730be71cb770e9c715b13374d80dbcee879675121ab51f9683d262ae9a1c"}, - {file = "faker-37.1.0.tar.gz", hash = "sha256:ad9dc66a3b84888b837ca729e85299a96b58fdaef0323ed0baace93c9614af06"}, + {file = "faker-37.3.0-py3-none-any.whl", hash = "sha256:48c94daa16a432f2d2bc803c7ff602509699fca228d13e97e379cd860a7e216e"}, + {file = "faker-37.3.0.tar.gz", hash = "sha256:77b79e7a2228d57175133af0bbcdd26dc623df81db390ee52f5104d46c010f2f"}, ] [package.dependencies] @@ -877,48 +880,55 @@ files = [ [[package]] name = "mypy" -version = "1.0.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, - {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, - {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, - {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, - {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, - {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, - {file = 
"mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, - {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, - {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, - {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, - {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, - {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, - {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, - {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, - {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, - {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, - {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, - {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, - {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, - {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = 
"mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = 
"mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -1262,18 +1272,18 @@ type = ["mypy (>=1.14.1)"] [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling 
mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pydantic" @@ -1725,125 +1735,128 @@ httpx = ">=0.25.0" [[package]] name = "rpds-py" -version = "0.24.0" +version = "0.25.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ - {file = "rpds_py-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724"}, - {file = "rpds_py-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664"}, - {file = 
"rpds_py-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875"}, - {file = "rpds_py-0.24.0-cp310-cp310-win32.whl", hash = "sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07"}, - {file = "rpds_py-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052"}, - {file = "rpds_py-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef"}, - {file = "rpds_py-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718"}, - {file = "rpds_py-0.24.0-cp311-cp311-win32.whl", hash = "sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a"}, - {file = "rpds_py-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6"}, - {file = "rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205"}, - {file = "rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda"}, - {file = 
"rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56"}, - {file = "rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30"}, - {file = "rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034"}, - {file = "rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c"}, - {file = "rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9"}, - {file = "rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143"}, - {file = "rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a"}, - {file = "rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114"}, - {file = "rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272"}, - {file = 
"rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c"}, - {file = "rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba"}, - {file = "rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350"}, - {file = "rpds_py-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a36b452abbf29f68527cf52e181fced56685731c86b52e852053e38d8b60bc8d"}, - {file = "rpds_py-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b3b397eefecec8e8e39fa65c630ef70a24b09141a6f9fc17b3c3a50bed6b50e"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdabcd3beb2a6dca7027007473d8ef1c3b053347c76f685f5f060a00327b8b65"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:5db385bacd0c43f24be92b60c857cf760b7f10d8234f4bd4be67b5b20a7c0b6b"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8097b3422d020ff1c44effc40ae58e67d93e60d540a65649d2cdaf9466030791"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493fe54318bed7d124ce272fc36adbf59d46729659b2c792e87c3b95649cdee9"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aa362811ccdc1f8dadcc916c6d47e554169ab79559319ae9fae7d7752d0d60c"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d8f9a6e7fd5434817526815f09ea27f2746c4a51ee11bb3439065f5fc754db58"}, - {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8205ee14463248d3349131bb8099efe15cd3ce83b8ef3ace63c7e976998e7124"}, - {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:921ae54f9ecba3b6325df425cf72c074cd469dea843fb5743a26ca7fb2ccb149"}, - {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32bab0a56eac685828e00cc2f5d1200c548f8bc11f2e44abf311d6b548ce2e45"}, - {file = "rpds_py-0.24.0-cp39-cp39-win32.whl", hash = "sha256:f5c0ed12926dec1dfe7d645333ea59cf93f4d07750986a586f511c0bc61fe103"}, - {file = "rpds_py-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:afc6e35f344490faa8276b5f2f7cbf71f88bc2cda4328e00553bd451728c571f"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098"}, - {file = 
"rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5"}, - {file = 
"rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e0f3ef95795efcd3b2ec3fe0a5bcfb5dadf5e3996ea2117427e524d4fbf309c6"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c13777ecdbbba2077670285dd1fe50828c8742f6a4119dbef6f83ea13ad10fb"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e8d804c2ccd618417e96720ad5cd076a86fa3f8cb310ea386a3e6229bae7d1"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd822f019ccccd75c832deb7aa040bb02d70a92eb15a2f16c7987b7ad4ee8d83"}, - {file = 
"rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0047638c3aa0dbcd0ab99ed1e549bbf0e142c9ecc173b6492868432d8989a046"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5b66d1b201cc71bc3081bc2f1fc36b0c1f268b773e03bbc39066651b9e18391"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbcbb6db5582ea33ce46a5d20a5793134b5365110d84df4e30b9d37c6fd40ad3"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63981feca3f110ed132fd217bf7768ee8ed738a55549883628ee3da75bb9cb78"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3a55fc10fdcbf1a4bd3c018eea422c52cf08700cf99c28b5cb10fe97ab77a0d3"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:c30ff468163a48535ee7e9bf21bd14c7a81147c0e58a36c1078289a8ca7af0bd"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:369d9c6d4c714e36d4a03957b4783217a3ccd1e222cdd67d464a3a479fc17796"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:24795c099453e3721fda5d8ddd45f5dfcc8e5a547ce7b8e9da06fecc3832e26f"}, - {file = "rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e"}, + {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"}, + {file = "rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2"}, + {file = "rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24"}, + {file = "rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a"}, + {file = "rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d"}, + {file = "rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2"}, + {file = 
"rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042"}, + {file = "rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc"}, + {file = "rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4"}, + {file = "rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4"}, + {file = "rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c"}, + {file = "rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b"}, + {file = 
"rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb"}, + {file = "rpds_py-0.25.1-cp312-cp312-win32.whl", hash = "sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe"}, + {file = "rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192"}, + {file = "rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728"}, + {file = "rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559"}, + {file = "rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd"}, + {file = "rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31"}, + {file = "rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500"}, + {file = 
"rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = "sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5"}, + {file = "rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129"}, + {file = "rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66"}, + {file = "rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = 
"sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523"}, + {file = "rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763"}, + {file = "rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd"}, + {file = "rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f"}, + {file = "rpds_py-0.25.1-cp39-cp39-win32.whl", hash = 
"sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449"}, + {file = "rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793"}, + {file = "rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3"}, ] [[package]] @@ -1859,29 +1872,29 @@ files = [ [[package]] name = "ruff" -version = "0.5.7" +version = "0.11.5" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, - {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, - {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, - 
{file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, - {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, - {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, - {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, - {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, + {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"}, + {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"}, + {file = "ruff-0.11.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4bfd80a6ec559a5eeb96c33f832418bf0fb96752de0539905cf7b0cc1d31d779"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0947c0a1afa75dcb5db4b34b070ec2bccee869d40e6cc8ab25aca11a7d527794"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad871ff74b5ec9caa66cb725b85d4ef89b53f8170f47c3406e32ef040400b038"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6cf918390cfe46d240732d4d72fa6e18e528ca1f60e318a10835cf2fa3dc19f"}, + {file = 
"ruff-0.11.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56145ee1478582f61c08f21076dc59153310d606ad663acc00ea3ab5b2125f82"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5f66f8f1e8c9fc594cbd66fbc5f246a8d91f916cb9667e80208663ec3728304"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80b4df4d335a80315ab9afc81ed1cff62be112bd165e162b5eed8ac55bfc8470"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3068befab73620b8a0cc2431bd46b3cd619bc17d6f7695a3e1bb166b652c382a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f5da2e710a9641828e09aa98b92c9ebbc60518fdf3921241326ca3e8f8e55b8b"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ef39f19cb8ec98cbc762344921e216f3857a06c47412030374fffd413fb8fd3a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2a7cedf47244f431fd11aa5a7e2806dda2e0c365873bda7834e8f7d785ae159"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:81be52e7519f3d1a0beadcf8e974715b2dfc808ae8ec729ecfc79bddf8dbb783"}, + {file = "ruff-0.11.5-py3-none-win32.whl", hash = "sha256:e268da7b40f56e3eca571508a7e567e794f9bfcc0f412c4b607931d3af9c4afe"}, + {file = "ruff-0.11.5-py3-none-win_amd64.whl", hash = "sha256:6c6dc38af3cfe2863213ea25b6dc616d679205732dc0fb673356c2d69608f800"}, + {file = "ruff-0.11.5-py3-none-win_arm64.whl", hash = "sha256:67e241b4314f4eacf14a601d586026a962f4002a475aa702c69980a38087aa4e"}, + {file = "ruff-0.11.5.tar.gz", hash = "sha256:cae2e2439cb88853e421901ec040a758960b576126dab520fa08e9de431d1bef"}, ] [[package]] @@ -2007,13 +2020,13 @@ telegram = ["requests"] [[package]] name = "types-python-dateutil" -version = "2.9.0.20241206" +version = "2.9.0.20250516" description = "Typing stubs for python-dateutil" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" 
files = [ - {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, - {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, + {file = "types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93"}, + {file = "types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5"}, ] [[package]] @@ -2029,13 +2042,13 @@ files = [ [[package]] name = "typing-inspection" -version = "0.4.0" +version = "0.4.1" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" files = [ - {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, - {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, ] [package.dependencies] @@ -2258,4 +2271,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "9a44e720f99e6df236ed3540a290a268409c157b29fdab3a2d285d4dcac10429" +content-hash = "6ee0e0fc89cb79dfb3ca7a88457b39073a0c3d009e618a1fc4f9bfabc0b0e89b" diff --git a/pyproject.toml b/pyproject.toml index 53e9a0863..d7dce0254 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "label-studio-sdk" [tool.poetry] name = "label-studio-sdk" -version = "1.0.15.dev" +version = "1.0.15" description = "" readme = "README.md" authors = [] @@ -56,14 +56,14 @@ typing_extensions = ">= 4.0.0" ujson = ">=5.8.0" 
xmljson = "0.2.1" -[tool.poetry.dev-dependencies] -mypy = "1.0.1" +[tool.poetry.group.dev.dependencies] +mypy = "==1.13.0" pytest = "^7.4.0" pytest-asyncio = "^0.23.5" python-dateutil = "^2.9.0" types-python-dateutil = "^2.9.0.20240316" respx = "^0.22.0" -ruff = "^0.5.6" +ruff = "==0.11.5" [tool.pytest.ini_options] testpaths = [ "tests" ] @@ -75,6 +75,26 @@ plugins = ["pydantic.mypy"] [tool.ruff] line-length = 120 +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "I", # isort +] +ignore = [ + "E402", # Module level import not at top of file + "E501", # Line too long + "E711", # Comparison to `None` should be `cond is not None` + "E712", # Avoid equality comparisons to `True`; use `if ...:` checks + "E721", # Use `is` and `is not` for type comparisons, or `isinstance()` for insinstance checks + "E722", # Do not use bare `except` + "E731", # Do not assign a `lambda` expression, use a `def` + "F821", # Undefined name + "F841" # Local variable ... is assigned to but never used +] + +[tool.ruff.lint.isort] +section-order = ["future", "standard-library", "third-party", "first-party"] [build-system] requires = ["poetry-core"] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..bc8667a07 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,20 @@ +Pillow>=10.0.1 +appdirs>=1.4.3 +datamodel-code-generator==0.26.1 +httpx>=0.21.2 +ijson>=3.2.3 +jsf==0.11.2 +jsonschema>=4.23.0 +lxml>=4.2.5 +nltk==3.9.1 +numpy>=1.26.4,<3.0.0 +opencv-python==4.9.0 +pandas>=0.24.0 +pydantic>= 1.9.2 +pydantic-core==2.18.2 +pyjwt==2.10.1 +requests>=2.22.0 +requests-mock==1.12.1 +typing_extensions>= 4.0.0 +ujson>=5.8.0 +xmljson==0.2.1 diff --git a/src/label_studio_sdk/__init__.py b/src/label_studio_sdk/__init__.py index 2159010c2..3fb01bd96 100644 --- a/src/label_studio_sdk/__init__.py +++ b/src/label_studio_sdk/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import ( AccessTokenResponse, Annotation, diff --git a/src/label_studio_sdk/actions/__init__.py b/src/label_studio_sdk/actions/__init__.py index 99e4531ac..574cdb2ce 100644 --- a/src/label_studio_sdk/actions/__init__.py +++ b/src/label_studio_sdk/actions/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( ActionsCreateRequestFilters, ActionsCreateRequestFiltersConjunction, diff --git a/src/label_studio_sdk/actions/client.py b/src/label_studio_sdk/actions/client.py index 0d609bebd..01f45e2ba 100644 --- a/src/label_studio_sdk/actions/client.py +++ b/src/label_studio_sdk/actions/client.py @@ -1,16 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.actions_create_request_id import ActionsCreateRequestId +from .raw_client import AsyncRawActionsClient, RawActionsClient from .types.actions_create_request_filters import ActionsCreateRequestFilters -from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems +from .types.actions_create_request_id import ActionsCreateRequestId from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.client_wrapper import AsyncClientWrapper +from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,7 +16,18 @@ class ActionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawActionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawActionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawActionsClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -42,18 +51,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> No ) client.actions.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -129,41 +128,32 @@ def create( ordering=["tasks:total_annotations"], ) """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="POST", - params={ - "id": id, - "project": project, - "view": view, - }, - json={ - "filters": convert_and_respect_annotation_metadata( - object_=filters, annotation=ActionsCreateRequestFilters, direction="write" - ), - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" - ), - "ordering": ordering, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + id=id, + project=project, + view=view, + filters=filters, + selected_items=selected_items, + ordering=ordering, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= 
_response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncActionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawActionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawActionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawActionsClient + """ + return self._raw_client async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -195,18 +185,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -290,33 +270,13 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="POST", - params={ - "id": id, - "project": project, - "view": view, - }, - json={ - "filters": convert_and_respect_annotation_metadata( - object_=filters, annotation=ActionsCreateRequestFilters, direction="write" - ), - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" - ), - "ordering": ordering, - }, - headers={ - "content-type": 
"application/json", - }, + _response = await self._raw_client.create( + id=id, + project=project, + view=view, + filters=filters, + selected_items=selected_items, + ordering=ordering, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/actions/raw_client.py b/src/label_studio_sdk/actions/raw_client.py new file mode 100644 index 000000000..5c45e77ef --- /dev/null +++ b/src/label_studio_sdk/actions/raw_client.py @@ -0,0 +1,223 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from .types.actions_create_request_filters import ActionsCreateRequestFilters +from .types.actions_create_request_id import ActionsCreateRequestId +from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem +from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawActionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Retrieve all the registered actions with descriptions that data manager can use. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + id: ActionsCreateRequestId, + project: int, + view: typing.Optional[int] = None, + filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, + selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, + ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` + + Parameters + ---------- + id : ActionsCreateRequestId + Action name ID, see the full list of actions in the `GET api/actions` request + + project : int + Project ID + + view : typing.Optional[int] + View ID (optional, it has higher priority than filters, selectedItems and ordering from the request body payload) + + filters : typing.Optional[ActionsCreateRequestFilters] + Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + + selected_items : typing.Optional[ActionsCreateRequestSelectedItems] + Task selection by IDs. If filters are applied, the selection will be applied to the filtered tasks.If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + + ordering : typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] + List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="POST", + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, annotation=ActionsCreateRequestFilters, direction="write" + ), + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawActionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[None]: + """ + Retrieve all the registered actions with descriptions that data manager can use. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + id: ActionsCreateRequestId, + project: int, + view: typing.Optional[int] = None, + filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, + selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, + ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` + + Parameters + ---------- + id : ActionsCreateRequestId + Action name ID, see the full list of actions in the `GET api/actions` request + + project : int + Project ID + + view : typing.Optional[int] + View ID (optional, it has higher priority than filters, selectedItems and ordering from the request body payload) + + filters : typing.Optional[ActionsCreateRequestFilters] + Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + + selected_items : typing.Optional[ActionsCreateRequestSelectedItems] + Task selection by IDs. If filters are applied, the selection will be applied to the filtered tasks.If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + + ordering : typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] + List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="POST", + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, annotation=ActionsCreateRequestFilters, direction="write" + ), + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/actions/types/__init__.py b/src/label_studio_sdk/actions/types/__init__.py index f44d52635..18d6b19ea 100644 --- a/src/label_studio_sdk/actions/types/__init__.py +++ b/src/label_studio_sdk/actions/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .actions_create_request_filters import ActionsCreateRequestFilters from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction from .actions_create_request_filters_items_item import ActionsCreateRequestFiltersItemsItem diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters.py b/src/label_studio_sdk/actions/types/actions_create_request_filters.py index 170a4acd6..1a20a01c2 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction -import pydantic import typing + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction from .actions_create_request_filters_items_item import ActionsCreateRequestFiltersItemsItem -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py index f70a875f2..c5442ec10 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel -from .actions_create_request_filters_items_item_filter import ActionsCreateRequestFiltersItemsItemFilter +import typing + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .actions_create_request_filters_items_item_filter import ActionsCreateRequestFiltersItemsItemFilter from .actions_create_request_filters_items_item_operator import ActionsCreateRequestFiltersItemsItemOperator from .actions_create_request_filters_items_item_value import ActionsCreateRequestFiltersItemsItemValue -from ...core.pydantic_utilities import IS_PYDANTIC_V2 -import typing class ActionsCreateRequestFiltersItemsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py index 64eb7cee4..2ceac5aac 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py @@ -1,8 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from .actions_create_request_selected_items_included import ActionsCreateRequestSelectedItemsIncluded + from .actions_create_request_selected_items_excluded import ActionsCreateRequestSelectedItemsExcluded +from .actions_create_request_selected_items_included import ActionsCreateRequestSelectedItemsIncluded ActionsCreateRequestSelectedItems = typing.Union[ ActionsCreateRequestSelectedItemsIncluded, ActionsCreateRequestSelectedItemsExcluded diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py index e1d15e854..89b9d4466 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel +import typing + +import pydantic import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata -import pydantic -import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestSelectedItemsExcluded(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py index 90f2ec326..7d943e6c5 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel +import typing + +import pydantic import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata -import pydantic -import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestSelectedItemsIncluded(UniversalBaseModel): diff --git a/src/label_studio_sdk/annotations/__init__.py b/src/label_studio_sdk/annotations/__init__.py index fb830a2b1..51c6a9415 100644 --- a/src/label_studio_sdk/annotations/__init__.py +++ b/src/label_studio_sdk/annotations/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import AnnotationsCreateBulkRequestSelectedItems, AnnotationsCreateBulkResponseItem __all__ = ["AnnotationsCreateBulkRequestSelectedItems", "AnnotationsCreateBulkResponseItem"] diff --git a/src/label_studio_sdk/annotations/client.py b/src/label_studio_sdk/annotations/client.py index 3ada05018..f6c692d23 100644 --- a/src/label_studio_sdk/annotations/client.py +++ b/src/label_studio_sdk/annotations/client.py @@ -1,17 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.annotation import Annotation -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from .raw_client import AsyncRawAnnotationsClient, RawAnnotationsClient from .types.annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems from .types.annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -19,7 +15,18 @@ class AnnotationsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAnnotationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAnnotationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawAnnotationsClient + """ + return self._raw_client def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ @@ -52,24 +59,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -103,18 +94,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -206,38 +187,19 @@ def update( ground_truth=True, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - 
"was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Annotation]: """ @@ -270,24 +232,8 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Annotation], - parse_obj_as( - type_=typing.List[Annotation], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, request_options=request_options) + return _response.data def create( self, @@ -392,38 +338,19 @@ def create( ground_truth=True, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="POST", - json={ - "result": result, - "task": task, - "project": project, 
- "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create_bulk( self, @@ -467,42 +394,31 @@ def create_bulk( ) client.annotations.create_bulk() """ - _response = self._client_wrapper.httpx_client.request( - "api/annotations/bulk", - method="POST", - json={ - "tasks": tasks, - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" - ), - "lead_time": lead_time, - "project": project, - "result": result, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create_bulk( + tasks=tasks, + selected_items=selected_items, + lead_time=lead_time, + project=project, + result=result, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AnnotationsCreateBulkResponseItem], - parse_obj_as( - type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncAnnotationsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAnnotationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAnnotationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawAnnotationsClient + """ + return self._raw_client async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ @@ -543,24 +459,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -602,18 +502,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await 
self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -713,38 +603,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -787,24 +658,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Annotation], - parse_obj_as( - type_=typing.List[Annotation], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + _response = await self._raw_client.list(id, request_options=request_options) + return _response.data async def create( self, @@ -917,38 +772,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="POST", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create_bulk( self, @@ -1000,34 +836,12 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/annotations/bulk", - method="POST", - json={ - "tasks": tasks, - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" - ), - "lead_time": lead_time, - "project": project, - "result": result, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create_bulk( + tasks=tasks, + selected_items=selected_items, + lead_time=lead_time, + project=project, + 
result=result, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AnnotationsCreateBulkResponseItem], - parse_obj_as( - type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/annotations/raw_client.py b/src/label_studio_sdk/annotations/raw_client.py new file mode 100644 index 000000000..e6e2a2650 --- /dev/null +++ b/src/label_studio_sdk/annotations/raw_client.py @@ -0,0 +1,794 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.annotation import Annotation +from .types.annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems +from .types.annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAnnotationsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Annotation]: + """ + + Tasks can have multiple annotations. 
Use this call to retrieve a specific annotation using its ID. + + You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Annotation] + Retrieved annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete an annotation. + + This action can't be undone! + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Annotation]: + """ + + Update attributes for an existing annotation. + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Annotation] + Updated annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[Annotation]]: + """ + + List all annotations for a task. + + You will need to supply the task ID. 
You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + + Parameters + ---------- + id : int + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Annotation]] + Annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Annotation]: + """ + + Add annotations to a task like an annotator does. + + You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + + + The content of the result field depends on your labeling configuration. 
For example, send the following data as part of your POST + request to send an empty annotation with the ID of the user who completed the task: + + ```json + { + "result": {}, + "was_cancelled": true, + "ground_truth": true, + "lead_time": 0, + "task": 0 + "completed_by": 123 + } + ``` + + Parameters + ---------- + id : int + Task ID + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Annotation] + Created annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="POST", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create_bulk( + self, + *, + tasks: typing.Optional[typing.Sequence[int]] = OMIT, + selected_items: typing.Optional[AnnotationsCreateBulkRequestSelectedItems] = OMIT, + lead_time: typing.Optional[float] = OMIT, + project: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[AnnotationsCreateBulkResponseItem]]: + """ + Create multiple annotations for specific tasks in a bulk operation. + + Parameters + ---------- + tasks : typing.Optional[typing.Sequence[int]] + + selected_items : typing.Optional[AnnotationsCreateBulkRequestSelectedItems] + + lead_time : typing.Optional[float] + + project : typing.Optional[int] + + result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[AnnotationsCreateBulkResponseItem]] + Annotations created successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/annotations/bulk", + method="POST", + json={ + "tasks": tasks, + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" + ), + "lead_time": lead_time, + "project": project, + "result": result, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawAnnotationsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Annotation]: + """ + + Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID. + + You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Annotation] + Retrieved annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete an annotation. + + This action can't be undone! + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Annotation]: + """ + + Update attributes for an existing annotation. + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Annotation] + Updated annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Annotation]]: + """ + + List all annotations for a task. 
+ + You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + + Parameters + ---------- + id : int + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[Annotation]] + Annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Annotation]: + """ + + Add annotations to a task like an annotator does. + + You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). 
+ + + The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST + request to send an empty annotation with the ID of the user who completed the task: + + ```json + { + "result": {}, + "was_cancelled": true, + "ground_truth": true, + "lead_time": 0, + "task": 0 + "completed_by": 123 + } + ``` + + Parameters + ---------- + id : int + Task ID + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Annotation] + Created annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="POST", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create_bulk( + self, + *, + tasks: typing.Optional[typing.Sequence[int]] = OMIT, + selected_items: typing.Optional[AnnotationsCreateBulkRequestSelectedItems] = OMIT, + lead_time: typing.Optional[float] = OMIT, + project: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[AnnotationsCreateBulkResponseItem]]: + """ + Create multiple annotations for specific tasks in a bulk operation. + + Parameters + ---------- + tasks : typing.Optional[typing.Sequence[int]] + + selected_items : typing.Optional[AnnotationsCreateBulkRequestSelectedItems] + + lead_time : typing.Optional[float] + + project : typing.Optional[int] + + result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[AnnotationsCreateBulkResponseItem]] + Annotations created successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/annotations/bulk", + method="POST", + json={ + "tasks": tasks, + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" + ), + "lead_time": lead_time, + "project": project, + "result": result, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/annotations/types/__init__.py b/src/label_studio_sdk/annotations/types/__init__.py index 0ec046f76..4bacc3ec7 100644 --- a/src/label_studio_sdk/annotations/types/__init__.py +++ b/src/label_studio_sdk/annotations/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems from .annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem diff --git a/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py b/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py index c3209d6c3..5a1c02d68 100644 --- a/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py +++ b/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ...core.serialization import FieldMetadata + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata class AnnotationsCreateBulkRequestSelectedItems(UniversalBaseModel): diff --git a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py index a023f4f3d..856097dc3 100644 --- a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +++ b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AnnotationsCreateBulkResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/base_client.py b/src/label_studio_sdk/base_client.py index 9e9490f1f..c5e01f8ba 100644 --- a/src/label_studio_sdk/base_client.py +++ b/src/label_studio_sdk/base_client.py @@ -1,50 +1,31 @@ # This file was auto-generated by Fern from our API Definition. -import typing -from .environment import LabelStudioEnvironment import os +import typing + import httpx +from .actions.client import ActionsClient, AsyncActionsClient +from .annotations.client import AnnotationsClient, AsyncAnnotationsClient +from .comments.client import AsyncCommentsClient, CommentsClient from .core.api_error import ApiError -from .core.client_wrapper import SyncClientWrapper -from .annotations.client import AnnotationsClient -from .users.client import UsersClient -from .actions.client import ActionsClient -from .views.client import ViewsClient -from .files.client import FilesClient -from .ml.client import MlClient -from .predictions.client import PredictionsClient -from .projects.client import ProjectsClient -from .tasks.client import TasksClient -from .import_storage.client import ImportStorageClient -from .export_storage.client import ExportStorageClient -from .webhooks.client import WebhooksClient -from .versions.client import VersionsClient -from .prompts.client import PromptsClient -from .model_providers.client import ModelProvidersClient -from .comments.client import CommentsClient -from .workspaces.client import WorkspacesClient -from .tokens.client import TokensClient -from .jwt_settings.client import JwtSettingsClient -from .core.client_wrapper import AsyncClientWrapper -from .annotations.client import AsyncAnnotationsClient -from .users.client import AsyncUsersClient -from 
.actions.client import AsyncActionsClient -from .views.client import AsyncViewsClient -from .files.client import AsyncFilesClient -from .ml.client import AsyncMlClient -from .predictions.client import AsyncPredictionsClient -from .projects.client import AsyncProjectsClient -from .tasks.client import AsyncTasksClient -from .import_storage.client import AsyncImportStorageClient -from .export_storage.client import AsyncExportStorageClient -from .webhooks.client import AsyncWebhooksClient -from .versions.client import AsyncVersionsClient -from .prompts.client import AsyncPromptsClient -from .model_providers.client import AsyncModelProvidersClient -from .comments.client import AsyncCommentsClient -from .workspaces.client import AsyncWorkspacesClient -from .tokens.client import AsyncTokensClient -from .jwt_settings.client import AsyncJwtSettingsClient +from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .environment import LabelStudioEnvironment +from .export_storage.client import AsyncExportStorageClient, ExportStorageClient +from .files.client import AsyncFilesClient, FilesClient +from .import_storage.client import AsyncImportStorageClient, ImportStorageClient +from .jwt_settings.client import AsyncJwtSettingsClient, JwtSettingsClient +from .ml.client import AsyncMlClient, MlClient +from .model_providers.client import AsyncModelProvidersClient, ModelProvidersClient +from .predictions.client import AsyncPredictionsClient, PredictionsClient +from .projects.client import AsyncProjectsClient, ProjectsClient +from .prompts.client import AsyncPromptsClient, PromptsClient +from .tasks.client import AsyncTasksClient, TasksClient +from .tokens.client import AsyncTokensClient, TokensClient +from .users.client import AsyncUsersClient, UsersClient +from .versions.client import AsyncVersionsClient, VersionsClient +from .views.client import AsyncViewsClient, ViewsClient +from .webhooks.client import AsyncWebhooksClient, WebhooksClient +from 
.workspaces.client import AsyncWorkspacesClient, WorkspacesClient class LabelStudioBase: @@ -94,7 +75,9 @@ def __init__( follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.Client] = None, ): - _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None + _defaulted_timeout = ( + timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read + ) if api_key is None: raise ApiError( body="The client must be instantiated be either passing in api_key or setting LABEL_STUDIO_API_KEY" @@ -177,7 +160,9 @@ def __init__( follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.AsyncClient] = None, ): - _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None + _defaulted_timeout = ( + timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read + ) if api_key is None: raise ApiError( body="The client must be instantiated be either passing in api_key or setting LABEL_STUDIO_API_KEY" diff --git a/src/label_studio_sdk/comments/__init__.py b/src/label_studio_sdk/comments/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/comments/__init__.py +++ b/src/label_studio_sdk/comments/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/comments/client.py b/src/label_studio_sdk/comments/client.py index dec7e7a72..2a30a7ef3 100644 --- a/src/label_studio_sdk/comments/client.py +++ b/src/label_studio_sdk/comments/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.comment import Comment -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawCommentsClient, RawCommentsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class CommentsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawCommentsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawCommentsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawCommentsClient + """ + return self._raw_client def list( self, @@ -58,29 +66,10 @@ def list( ) client.comments.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/comments/", - method="GET", - params={ - "project": project, - "expand_created_by": expand_created_by, - "annotation": annotation, - }, - request_options=request_options, + _response = self._raw_client.list( + project=project, expand_created_by=expand_created_by, annotation=annotation, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Comment], - parse_obj_as( - type_=typing.List[Comment], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create( self, @@ -122,34 +111,10 @@ def create( 
) client.comments.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/comments/", - method="POST", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + annotation=annotation, project=project, text=text, is_resolved=is_resolved, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Comment: """ @@ -180,24 +145,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -227,18 +176,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - 
f"api/comments/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -286,39 +225,31 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="PATCH", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + annotation=annotation, + project=project, + text=text, + is_resolved=is_resolved, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncCommentsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawCommentsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawCommentsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawCommentsClient + """ + return self._raw_client async def list( self, @@ -368,29 +299,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/comments/", - method="GET", - params={ - "project": project, - "expand_created_by": expand_created_by, - "annotation": annotation, - }, - request_options=request_options, + _response = await self._raw_client.list( + project=project, expand_created_by=expand_created_by, annotation=annotation, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Comment], - parse_obj_as( - type_=typing.List[Comment], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ -440,34 +352,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/comments/", - method="POST", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + annotation=annotation, project=project, text=text, is_resolved=is_resolved, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None) -> Comment: """ @@ -506,24 +394,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -561,18 +433,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -628,31 +490,12 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="PATCH", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + annotation=annotation, + project=project, + text=text, + 
is_resolved=is_resolved, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/comments/raw_client.py b/src/label_studio_sdk/comments/raw_client.py new file mode 100644 index 000000000..c60535a93 --- /dev/null +++ b/src/label_studio_sdk/comments/raw_client.py @@ -0,0 +1,529 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.comment import Comment + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawCommentsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + project: typing.Optional[int] = None, + expand_created_by: typing.Optional[bool] = None, + annotation: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[Comment]]: + """ + + Get a list of comments for a specific project. 
+ + Parameters + ---------- + project : typing.Optional[int] + Project ID + + expand_created_by : typing.Optional[bool] + Expand the created_by field with object instead of ID + + annotation : typing.Optional[int] + Annotation ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Comment]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/comments/", + method="GET", + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Comment]: + """ + + Create a new comment. + + Parameters + ---------- + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Comment] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/comments/", + method="POST", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Comment]: + """ + + Get a specific comment. + + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Comment] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific comment. 
+ + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Comment]: + """ + + Update a specific comment. + + Parameters + ---------- + id : int + Comment ID + + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Comment] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="PATCH", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawCommentsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + project: typing.Optional[int] = None, + expand_created_by: typing.Optional[bool] = None, + annotation: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[Comment]]: + """ + + Get a list of comments for a specific project. + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + expand_created_by : typing.Optional[bool] + Expand the created_by field with object instead of ID + + annotation : typing.Optional[int] + Annotation ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Comment]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/comments/", + method="GET", + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Comment]: + """ + + Create a new comment. + + Parameters + ---------- + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Comment] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/comments/", + method="POST", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Comment]: + """ + + Get a specific comment. + + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Comment] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific comment. + + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Comment]: + """ + + Update a specific comment. 
+ + Parameters + ---------- + id : int + Comment ID + + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Comment] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="PATCH", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/core/__init__.py b/src/label_studio_sdk/core/__init__.py index 42031ad0b..d1461de7c 100644 --- a/src/label_studio_sdk/core/__init__.py +++ b/src/label_studio_sdk/core/__init__.py @@ -1,10 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .api_error import ApiError from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper from .datetime_utils import serialize_datetime from .file import File, convert_file_dict_to_httpx_tuples, with_content_type from .http_client import AsyncHttpClient, HttpClient +from .http_response import AsyncHttpResponse, HttpResponse from .jsonable_encoder import jsonable_encoder from .pagination import AsyncPager, SyncPager from .pydantic_utilities import ( @@ -25,11 +28,13 @@ "ApiError", "AsyncClientWrapper", "AsyncHttpClient", + "AsyncHttpResponse", "AsyncPager", "BaseClientWrapper", "FieldMetadata", "File", "HttpClient", + "HttpResponse", "IS_PYDANTIC_V2", "RequestOptions", "SyncClientWrapper", diff --git a/src/label_studio_sdk/core/api_error.py b/src/label_studio_sdk/core/api_error.py index 2e9fc5431..6f850a60c 100644 --- a/src/label_studio_sdk/core/api_error.py +++ b/src/label_studio_sdk/core/api_error.py @@ -1,15 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing +from typing import Any, Dict, Optional class ApiError(Exception): - status_code: typing.Optional[int] - body: typing.Any + headers: Optional[Dict[str, str]] + status_code: Optional[int] + body: Any - def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None): + def __init__( + self, + *, + headers: Optional[Dict[str, str]] = None, + status_code: Optional[int] = None, + body: Any = None, + ) -> None: + self.headers = headers self.status_code = status_code self.body = body def __str__(self) -> str: - return f"status_code: {self.status_code}, body: {self.body}" + return f"headers: {self.headers}, status_code: {self.status_code}, body: {self.body}" diff --git a/src/label_studio_sdk/core/force_multipart.py b/src/label_studio_sdk/core/force_multipart.py new file mode 100644 index 000000000..ae24ccff1 --- /dev/null +++ b/src/label_studio_sdk/core/force_multipart.py @@ -0,0 +1,16 @@ +# This file was auto-generated by Fern from our API Definition. + + +class ForceMultipartDict(dict): + """ + A dictionary subclass that always evaluates to True in boolean contexts. + + This is used to force multipart/form-data encoding in HTTP requests even when + the dictionary is empty, which would normally evaluate to False. 
+ """ + + def __bool__(self): + return True + + +FORCE_MULTIPART = ForceMultipartDict() diff --git a/src/label_studio_sdk/core/http_client.py b/src/label_studio_sdk/core/http_client.py index 1a1a1311a..e4173f990 100644 --- a/src/label_studio_sdk/core/http_client.py +++ b/src/label_studio_sdk/core/http_client.py @@ -2,7 +2,6 @@ import asyncio import email.utils -import json import re import time import typing @@ -11,12 +10,13 @@ from random import random import httpx - from .file import File, convert_file_dict_to_httpx_tuples +from .force_multipart import FORCE_MULTIPART from .jsonable_encoder import jsonable_encoder from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions +from httpx._types import RequestFiles INITIAL_RETRY_DELAY_SECONDS = 0.5 MAX_RETRY_DELAY_SECONDS = 10 @@ -85,8 +85,8 @@ def _retry_timeout(response: httpx.Response, retries: int) -> float: def _should_retry(response: httpx.Response) -> bool: - retriable_400s = [429, 408, 409] - return response.status_code >= 500 or response.status_code in retriable_400s + retryable_400s = [429, 408, 409] + return response.status_code >= 500 or response.status_code in retryable_400s def remove_omit_from_dict( @@ -180,11 +180,17 @@ def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: 
typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -195,6 +201,15 @@ def request( json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + response = self.httpx_client.request( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -227,11 +242,7 @@ def request( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit) - else None - ), + files=request_files, timeout=timeout, ) @@ -266,11 +277,17 @@ def stream( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.Iterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -279,6 +296,15 @@ def stream( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + 
convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) with self.httpx_client.stream( @@ -313,11 +339,7 @@ def stream( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit) - else None - ), + files=request_files, timeout=timeout, ) as stream: yield stream @@ -356,11 +378,17 @@ async def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -369,6 +397,15 @@ async def request( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = 
FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) # Add the input to each of these and do None-safety checks @@ -404,11 +441,7 @@ async def request( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if files is not None - else None - ), + files=request_files, timeout=timeout, ) @@ -442,11 +475,17 @@ async def stream( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, - retries: int = 0, + retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.AsyncIterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -455,6 +494,15 @@ async def stream( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) async with self.httpx_client.stream( @@ -489,11 +537,7 @@ async def stream( json=json_body, data=data_body, content=content, - files=( - 
convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if files is not None - else None - ), + files=request_files, timeout=timeout, ) as stream: yield stream diff --git a/src/label_studio_sdk/core/http_response.py b/src/label_studio_sdk/core/http_response.py new file mode 100644 index 000000000..48a1798a5 --- /dev/null +++ b/src/label_studio_sdk/core/http_response.py @@ -0,0 +1,55 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import Dict, Generic, TypeVar + +import httpx + +T = TypeVar("T") +"""Generic to represent the underlying type of the data wrapped by the HTTP response.""" + + +class BaseHttpResponse: + """Minimalist HTTP response wrapper that exposes response headers.""" + + _response: httpx.Response + + def __init__(self, response: httpx.Response): + self._response = response + + @property + def headers(self) -> Dict[str, str]: + return dict(self._response.headers) + + +class HttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + def close(self) -> None: + self._response.close() + + +class AsyncHttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + async def close(self) -> None: + await self._response.aclose() diff --git a/src/label_studio_sdk/core/jsonable_encoder.py b/src/label_studio_sdk/core/jsonable_encoder.py index 1b631e901..afee3662d 100644 --- a/src/label_studio_sdk/core/jsonable_encoder.py +++ b/src/label_studio_sdk/core/jsonable_encoder.py @@ -17,7 +17,6 @@ from typing import Any, Callable, Dict, 
List, Optional, Set, Union import pydantic - from .datetime_utils import serialize_datetime from .pydantic_utilities import ( IS_PYDANTIC_V2, diff --git a/src/label_studio_sdk/core/pagination.py b/src/label_studio_sdk/core/pagination.py index 5f482635a..209a1ff14 100644 --- a/src/label_studio_sdk/core/pagination.py +++ b/src/label_studio_sdk/core/pagination.py @@ -1,16 +1,17 @@ # This file was auto-generated by Fern from our API Definition. -import typing +from __future__ import annotations -from typing_extensions import Self +from dataclasses import dataclass +from typing import AsyncIterator, Awaitable, Callable, Generic, Iterator, List, Optional, TypeVar -import pydantic +from .http_response import BaseHttpResponse -# Generic to represent the underlying type of the results within a page -T = typing.TypeVar("T") +T = TypeVar("T") +"""Generic to represent the underlying type of the results within a page""" -# SDKs implement a Page ABC per-pagination request, the endpoint then retuns a pager that wraps this type +# SDKs implement a Page ABC per-pagination request, the endpoint then returns a pager that wraps this type # for example, an endpoint will return SyncPager[UserPage] where UserPage implements the Page ABC. 
ex: # # SyncPager( @@ -19,70 +20,63 @@ # # This should be the outer function that returns the SyncPager again # get_next=lambda: list(..., cursor: response.cursor) (or list(..., offset: offset + 1)) # ) -class BasePage(pydantic.BaseModel, typing.Generic[T]): - has_next: bool - items: typing.Optional[typing.List[T]] - - -class SyncPage(BasePage[T], typing.Generic[T]): - get_next: typing.Optional[typing.Callable[[], typing.Optional[Self]]] - - -class AsyncPage(BasePage[T], typing.Generic[T]): - get_next: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Optional[Self]]]] -# ---------------------------- - +@dataclass(frozen=True) +class SyncPager(Generic[T]): + get_next: Optional[Callable[[], Optional[SyncPager[T]]]] + has_next: bool + items: Optional[List[T]] + response: Optional[BaseHttpResponse] -class SyncPager(SyncPage[T], typing.Generic[T]): # Here we type ignore the iterator to avoid a mypy error # caused by the type conflict with Pydanitc's __iter__ method # brought in by extending the base model - def __iter__(self) -> typing.Iterator[T]: # type: ignore + def __iter__(self) -> Iterator[T]: # type: ignore[override] for page in self.iter_pages(): if page.items is not None: - for item in page.items: - yield item + yield from page.items - def iter_pages(self) -> typing.Iterator[SyncPage[T]]: - page: typing.Union[SyncPager[T], None] = self - while True: - if page is not None: - yield page - if page.has_next and page.get_next is not None: - page = page.get_next() - if page is None or page.items is None or len(page.items) == 0: - return - else: - return - else: + def iter_pages(self) -> Iterator[SyncPager[T]]: + page: Optional[SyncPager[T]] = self + while page is not None: + yield page + + if not page.has_next or page.get_next is None: + return + + page = page.get_next() + if page is None or page.items is None or len(page.items) == 0: return - def next_page(self) -> typing.Optional[SyncPage[T]]: + def next_page(self) -> Optional[SyncPager[T]]: return 
self.get_next() if self.get_next is not None else None -class AsyncPager(AsyncPage[T], typing.Generic[T]): - async def __aiter__(self) -> typing.AsyncIterator[T]: # type: ignore +@dataclass(frozen=True) +class AsyncPager(Generic[T]): + get_next: Optional[Callable[[], Awaitable[Optional[AsyncPager[T]]]]] + has_next: bool + items: Optional[List[T]] + response: Optional[BaseHttpResponse] + + async def __aiter__(self) -> AsyncIterator[T]: async for page in self.iter_pages(): if page.items is not None: for item in page.items: yield item - async def iter_pages(self) -> typing.AsyncIterator[AsyncPage[T]]: - page: typing.Union[AsyncPager[T], None] = self - while True: - if page is not None: - yield page - if page is not None and page.has_next and page.get_next is not None: - page = await page.get_next() - if page is None or page.items is None or len(page.items) == 0: - return - else: - return - else: + async def iter_pages(self) -> AsyncIterator[AsyncPager[T]]: + page: Optional[AsyncPager[T]] = self + while page is not None: + yield page + + if not page.has_next or page.get_next is None: + return + + page = await page.get_next() + if page is None or page.items is None or len(page.items) == 0: return - async def next_page(self) -> typing.Optional[AsyncPage[T]]: + async def next_page(self) -> Optional[AsyncPager[T]]: return await self.get_next() if self.get_next is not None else None diff --git a/src/label_studio_sdk/core/pydantic_utilities.py b/src/label_studio_sdk/core/pydantic_utilities.py index ee8f0e410..0360ef49a 100644 --- a/src/label_studio_sdk/core/pydantic_utilities.py +++ b/src/label_studio_sdk/core/pydantic_utilities.py @@ -2,89 +2,65 @@ # nopycln: file import datetime as dt -import typing from collections import defaultdict - -import typing_extensions +from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast import pydantic -from .datetime_utils import serialize_datetime -from .serialization import 
convert_and_respect_annotation_metadata - IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") if IS_PYDANTIC_V2: - # isort will try to reformat the comments on these imports, which breaks mypy - # isort: off - from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - parse_date as parse_date, - ) - from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - parse_datetime as parse_datetime, - ) - from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - ENCODERS_BY_TYPE as encoders_by_type, - ) - from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - get_args as get_args, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - get_origin as get_origin, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - is_literal_type as is_literal_type, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - is_union as is_union, - ) - from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + from pydantic.v1.datetime_parse import parse_date as parse_date + from pydantic.v1.datetime_parse import parse_datetime as parse_datetime + from pydantic.v1.fields import ModelField as ModelField + from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined] + from pydantic.v1.typing import get_args as get_args + from pydantic.v1.typing import get_origin as get_origin + from pydantic.v1.typing import is_literal_type as is_literal_type + from pydantic.v1.typing import is_union as is_union else: - from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 - from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 - from 
pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 - from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 - from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 - from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 - from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 - from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 - - # isort: on + from pydantic.datetime_parse import parse_date as parse_date # type: ignore[no-redef] + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore[no-redef] + from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined, no-redef] + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[no-redef] + from pydantic.typing import get_args as get_args # type: ignore[no-redef] + from pydantic.typing import get_origin as get_origin # type: ignore[no-redef] + from pydantic.typing import is_literal_type as is_literal_type # type: ignore[no-redef] + from pydantic.typing import is_union as is_union # type: ignore[no-redef] +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata +from typing_extensions import TypeAlias -T = typing.TypeVar("T") -Model = typing.TypeVar("Model", bound=pydantic.BaseModel) +T = TypeVar("T") +Model = TypeVar("Model", bound=pydantic.BaseModel) -def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: +def parse_obj_as(type_: Type[T], object_: Any) -> T: dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read") if IS_PYDANTIC_V2: - adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2 + adapter = pydantic.TypeAdapter(type_) # type: ignore[attr-defined] return adapter.validate_python(dealiased_object) - else: - return 
pydantic.parse_obj_as(type_, dealiased_object) + return pydantic.parse_obj_as(type_, dealiased_object) -def to_jsonable_with_fallback( - obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any] -) -> typing.Any: +def to_jsonable_with_fallback(obj: Any, fallback_serializer: Callable[[Any], Any]) -> Any: if IS_PYDANTIC_V2: from pydantic_core import to_jsonable_python return to_jsonable_python(obj, fallback=fallback_serializer) - else: - return fallback_serializer(obj) + return fallback_serializer(obj) class UniversalBaseModel(pydantic.BaseModel): if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( - # Allow fields begining with `model_` to be used in the model + model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( # type: ignore[typeddict-unknown-key] + # Allow fields beginning with `model_` to be used in the model protected_namespaces=(), - ) # type: ignore # Pydantic v2 + ) - @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore # Pydantic v2 - def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any: # type: ignore # Pydantic v2 + @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore[attr-defined] + def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> Any: # type: ignore[name-defined] serialized = handler(self) data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()} return data @@ -96,59 +72,53 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} @classmethod - def model_construct( - cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any - ) -> "Model": + def model_construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") return 
cls.construct(_fields_set, **dealiased_object) @classmethod - def construct( - cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any - ) -> "Model": + def construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") if IS_PYDANTIC_V2: - return super().model_construct(_fields_set, **dealiased_object) # type: ignore # Pydantic v2 - else: - return super().construct(_fields_set, **dealiased_object) + return super().model_construct(_fields_set, **dealiased_object) # type: ignore[misc] + return super().construct(_fields_set, **dealiased_object) - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { + def json(self, **kwargs: Any) -> str: + kwargs_with_defaults = { "by_alias": True, "exclude_unset": True, **kwargs, } if IS_PYDANTIC_V2: - return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2 - else: - return super().json(**kwargs_with_defaults) + return super().model_dump_json(**kwargs_with_defaults) # type: ignore[misc] + return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + def dict(self, **kwargs: Any) -> Dict[str, Any]: """ Override the default dict method to `exclude_unset` by default. This function patches `exclude_unset` to work include fields within non-None default values. """ - # Note: the logic here is multi-plexed given the levers exposed in Pydantic V1 vs V2 + # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2 # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice. # # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models # that we have less control over, and this is less intrusive than custom serializers for now. 
if IS_PYDANTIC_V2: - kwargs_with_defaults_exclude_unset: typing.Any = { + kwargs_with_defaults_exclude_unset = { **kwargs, "by_alias": True, "exclude_unset": True, "exclude_none": False, } - kwargs_with_defaults_exclude_none: typing.Any = { + kwargs_with_defaults_exclude_none = { **kwargs, "by_alias": True, "exclude_none": True, "exclude_unset": False, } dict_dump = deep_union_pydantic_dicts( - super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2 - super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2 + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore[misc] + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore[misc] ) else: @@ -168,7 +138,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: if default is not None: self.__fields_set__.add(name) - kwargs_with_defaults_exclude_unset_include_fields: typing.Any = { + kwargs_with_defaults_exclude_unset_include_fields = { "by_alias": True, "exclude_unset": True, "include": _fields_set, @@ -180,12 +150,10 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write") -def _union_list_of_pydantic_dicts( - source: typing.List[typing.Any], destination: typing.List[typing.Any] -) -> typing.List[typing.Any]: - converted_list: typing.List[typing.Any] = [] +def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]: + converted_list: List[Any] = [] for i, item in enumerate(source): - destination_value = destination[i] # type: ignore + destination_value = destination[i] if isinstance(item, dict): converted_list.append(deep_union_pydantic_dicts(item, destination_value)) elif isinstance(item, list): @@ -195,9 +163,7 @@ def _union_list_of_pydantic_dicts( return converted_list -def deep_union_pydantic_dicts( - source: typing.Dict[str, typing.Any], 
destination: typing.Dict[str, typing.Any] -) -> typing.Dict[str, typing.Any]: +def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any]) -> Dict[str, Any]: for key, value in source.items(): node = destination.setdefault(key, {}) if isinstance(value, dict): @@ -215,18 +181,16 @@ def deep_union_pydantic_dicts( if IS_PYDANTIC_V2: - class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[misc, name-defined, type-arg] pass - UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore + UniversalRootModel: TypeAlias = V2RootModel # type: ignore[misc] else: - UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore + UniversalRootModel: TypeAlias = UniversalBaseModel # type: ignore[misc, no-redef] -def encode_by_type(o: typing.Any) -> typing.Any: - encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = ( - defaultdict(tuple) - ) +def encode_by_type(o: Any) -> Any: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) for type_, encoder in encoders_by_type.items(): encoders_by_class_tuples[encoder] += (type_,) @@ -237,54 +201,49 @@ def encode_by_type(o: typing.Any) -> typing.Any: return encoder(o) -def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: +def update_forward_refs(model: Type["Model"], **localns: Any) -> None: if IS_PYDANTIC_V2: - model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2 + model.model_rebuild(raise_errors=False) # type: ignore[attr-defined] else: model.update_forward_refs(**localns) # Mirrors Pydantic's internal typing -AnyCallable = typing.Callable[..., typing.Any] +AnyCallable = Callable[..., Any] def universal_root_validator( pre: bool = False, -) -> typing.Callable[[AnyCallable], AnyCallable]: +) -> Callable[[AnyCallable], 
AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 - else: - return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 + return cast(AnyCallable, pydantic.model_validator(mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.root_validator(pre=pre)(func)) # type: ignore[call-overload] return decorator -def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: +def universal_field_validator(field_name: str, pre: bool = False) -> Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 - else: - return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1 + return cast(AnyCallable, pydantic.field_validator(field_name, mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func)) return decorator -PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo] +PydanticField = Union[ModelField, pydantic.fields.FieldInfo] -def _get_model_fields( - model: typing.Type["Model"], -) -> typing.Mapping[str, PydanticField]: +def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]: if IS_PYDANTIC_V2: - return model.model_fields # type: ignore # Pydantic v2 - else: - return model.__fields__ # type: ignore # Pydantic v1 + return cast(Mapping[str, PydanticField], model.model_fields) # type: ignore[attr-defined] + return cast(Mapping[str, PydanticField], model.__fields__) -def _get_field_default(field: PydanticField) -> typing.Any: +def _get_field_default(field: PydanticField) -> Any: try: - value = field.get_default() # type: ignore # 
Pydantic < v1.10.15 + value = field.get_default() # type: ignore[union-attr] except: value = field.default if IS_PYDANTIC_V2: diff --git a/src/label_studio_sdk/core/serialization.py b/src/label_studio_sdk/core/serialization.py index cb5dcbf93..c36e865cc 100644 --- a/src/label_studio_sdk/core/serialization.py +++ b/src/label_studio_sdk/core/serialization.py @@ -4,9 +4,8 @@ import inspect import typing -import typing_extensions - import pydantic +import typing_extensions class FieldMetadata: @@ -161,7 +160,12 @@ def _convert_mapping( direction: typing.Literal["read", "write"], ) -> typing.Mapping[str, object]: converted_object: typing.Dict[str, object] = {} - annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + try: + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + except NameError: + # The TypedDict contains a circular reference, so + # we use the __annotations__ attribute directly. + annotations = getattr(expected_type, "__annotations__", {}) aliases_to_field_names = _get_alias_to_field_name(annotations) for key, value in object_.items(): if direction == "read" and key in aliases_to_field_names: diff --git a/src/label_studio_sdk/errors/__init__.py b/src/label_studio_sdk/errors/__init__.py index 076c429b4..27c365534 100644 --- a/src/label_studio_sdk/errors/__init__.py +++ b/src/label_studio_sdk/errors/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .bad_request_error import BadRequestError from .internal_server_error import InternalServerError from .not_found_error import NotFoundError diff --git a/src/label_studio_sdk/errors/bad_request_error.py b/src/label_studio_sdk/errors/bad_request_error.py index 9c13c61f9..baf5be4f7 100644 --- a/src/label_studio_sdk/errors/bad_request_error.py +++ b/src/label_studio_sdk/errors/bad_request_error.py @@ -1,9 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class BadRequestError(ApiError): - def __init__(self, body: typing.Optional[typing.Any]): - super().__init__(status_code=400, body=body) + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=400, headers=headers, body=body) diff --git a/src/label_studio_sdk/errors/internal_server_error.py b/src/label_studio_sdk/errors/internal_server_error.py index b4d235549..2c9be920b 100644 --- a/src/label_studio_sdk/errors/internal_server_error.py +++ b/src/label_studio_sdk/errors/internal_server_error.py @@ -1,8 +1,10 @@ # This file was auto-generated by Fern from our API Definition. +import typing + from ..core.api_error import ApiError class InternalServerError(ApiError): - def __init__(self, body: str): - super().__init__(status_code=500, body=body) + def __init__(self, body: str, headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=500, headers=headers, body=body) diff --git a/src/label_studio_sdk/errors/not_found_error.py b/src/label_studio_sdk/errors/not_found_error.py index a1235b87f..dcd60e383 100644 --- a/src/label_studio_sdk/errors/not_found_error.py +++ b/src/label_studio_sdk/errors/not_found_error.py @@ -1,9 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class NotFoundError(ApiError): - def __init__(self, body: typing.Optional[typing.Any]): - super().__init__(status_code=404, body=body) + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=404, headers=headers, body=body) diff --git a/src/label_studio_sdk/errors/unauthorized_error.py b/src/label_studio_sdk/errors/unauthorized_error.py index 1c00f98ab..c83b25c26 100644 --- a/src/label_studio_sdk/errors/unauthorized_error.py +++ b/src/label_studio_sdk/errors/unauthorized_error.py @@ -1,9 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class UnauthorizedError(ApiError): - def __init__(self, body: typing.Optional[typing.Any]): - super().__init__(status_code=401, body=body) + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=401, headers=headers, body=body) diff --git a/src/label_studio_sdk/export_storage/__init__.py b/src/label_studio_sdk/export_storage/__init__.py index 0203a293b..635b53022 100644 --- a/src/label_studio_sdk/export_storage/__init__.py +++ b/src/label_studio_sdk/export_storage/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ExportStorageListTypesResponseItem from . 
import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse diff --git a/src/label_studio_sdk/export_storage/azure/__init__.py b/src/label_studio_sdk/export_storage/azure/__init__.py index 323fc5f3d..97dcea344 100644 --- a/src/label_studio_sdk/export_storage/azure/__init__.py +++ b/src/label_studio_sdk/export_storage/azure/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import AzureCreateResponse, AzureUpdateResponse __all__ = ["AzureCreateResponse", "AzureUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/azure/client.py b/src/label_studio_sdk/export_storage/azure/client.py index 602fff253..c7314088a 100644 --- a/src/label_studio_sdk/export_storage/azure/client.py +++ b/src/label_studio_sdk/export_storage/azure/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_export_storage import AzureBlobExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawAzureClient, RawAzureClient from .types.azure_create_response import AzureCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,7 +15,18 @@ class AzureClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawAzureClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.export_storage.azure.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobExportStorage], - parse_obj_as( - type_=typing.List[AzureBlobExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -139,38 +128,18 @@ def create( ) client.export_storage.azure.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + 
account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -235,33 +204,19 @@ def validate( ) client.export_storage.azure.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -294,24 +249,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="GET", - 
request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -343,18 +282,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -424,38 +353,19 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + 
account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -489,29 +399,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncAzureClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawAzureClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -554,27 +459,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobExportStorage], - parse_obj_as( - type_=typing.List[AzureBlobExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -648,38 +534,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = 
_response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -752,33 +618,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -819,24 +671,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, 
body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -876,18 +712,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -965,38 +791,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -1038,21 +845,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/azure/raw_client.py b/src/label_studio_sdk/export_storage/azure/raw_client.py new file mode 100644 index 000000000..12ef87da6 --- /dev/null +++ b/src/label_studio_sdk/export_storage/azure/raw_client.py @@ -0,0 +1,881 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.azure_blob_export_storage import AzureBlobExportStorage +from .types.azure_create_response import AzureCreateResponse +from .types.azure_update_response import AzureUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAzureClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[AzureBlobExportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[AzureBlobExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureCreateResponse]: + """ + + Create a new target storage connection to Microsoft Azure Blob storage. + + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
+ + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, 
+ account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobExportStorage]: + """ + + Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[AzureBlobExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. 
+ + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobExportStorage]: + """ + + Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external containers only go one way. 
They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureBlobExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawAzureClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[AzureBlobExportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. 
+ + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[AzureBlobExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AzureCreateResponse]: + """ + + Create a new target storage connection to Microsoft Azure Blob storage. 
+ + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AzureBlobExportStorage]: + """ + + Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AzureBlobExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. 
+ + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AzureBlobExportStorage]: + """ + + Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
+ + Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureBlobExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/azure/types/__init__.py b/src/label_studio_sdk/export_storage/azure/types/__init__.py index 0cb2cdbbc..e56fb71c1 100644 --- a/src/label_studio_sdk/export_storage/azure/types/__init__.py +++ b/src/label_studio_sdk/export_storage/azure/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .azure_create_response import AzureCreateResponse from .azure_update_response import AzureUpdateResponse diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py index d82c30787..1f374501b 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py index e530bafb9..286118f96 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/client.py b/src/label_studio_sdk/export_storage/client.py index abb4cd3d9..d0158fe01 100644 --- a/src/label_studio_sdk/export_storage/client.py +++ b/src/label_studio_sdk/export_storage/client.py @@ -1,36 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.client_wrapper import SyncClientWrapper -from .azure.client import AzureClient -from .gcs.client import GcsClient -from .local.client import LocalClient -from .redis.client import RedisClient -from .s3.client import S3Client -from .s3s.client import S3SClient import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions +from .azure.client import AsyncAzureClient, AzureClient +from .gcs.client import AsyncGcsClient, GcsClient +from .local.client import AsyncLocalClient, LocalClient +from .raw_client import AsyncRawExportStorageClient, RawExportStorageClient +from .redis.client import AsyncRedisClient, RedisClient +from .s3.client import AsyncS3Client, S3Client +from .s3s.client import AsyncS3SClient, S3SClient from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper -from .azure.client import AsyncAzureClient -from .gcs.client import AsyncGcsClient -from .local.client import AsyncLocalClient -from .redis.client import AsyncRedisClient -from .s3.client import AsyncS3Client -from .s3s.client import AsyncS3SClient class ExportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AzureClient(client_wrapper=self._client_wrapper) - self.gcs = GcsClient(client_wrapper=self._client_wrapper) - self.local = LocalClient(client_wrapper=self._client_wrapper) - self.redis = RedisClient(client_wrapper=self._client_wrapper) - self.s3 = S3Client(client_wrapper=self._client_wrapper) - self.s3s = S3SClient(client_wrapper=self._client_wrapper) + self._raw_client = RawExportStorageClient(client_wrapper=client_wrapper) + self.azure = AzureClient(client_wrapper=client_wrapper) + + self.gcs = 
GcsClient(client_wrapper=client_wrapper) + + self.local = LocalClient(client_wrapper=client_wrapper) + + self.redis = RedisClient(client_wrapper=client_wrapper) + + self.s3 = S3Client(client_wrapper=client_wrapper) + + self.s3s = S3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawExportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawExportStorageClient + """ + return self._raw_client def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -57,35 +65,35 @@ def list_types( ) client.export_storage.list_types() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list_types(request_options=request_options) + return _response.data class AsyncExportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) - self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) - self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) - self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) - self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) - self.s3s = AsyncS3SClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawExportStorageClient(client_wrapper=client_wrapper) + self.azure = 
AsyncAzureClient(client_wrapper=client_wrapper) + + self.gcs = AsyncGcsClient(client_wrapper=client_wrapper) + + self.local = AsyncLocalClient(client_wrapper=client_wrapper) + + self.redis = AsyncRedisClient(client_wrapper=client_wrapper) + + self.s3 = AsyncS3Client(client_wrapper=client_wrapper) + + self.s3s = AsyncS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawExportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawExportStorageClient + """ + return self._raw_client async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -120,21 +128,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_types(request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/gcs/__init__.py b/src/label_studio_sdk/export_storage/gcs/__init__.py index 2c4b3d376..7054c2af9 100644 --- a/src/label_studio_sdk/export_storage/gcs/__init__.py +++ b/src/label_studio_sdk/export_storage/gcs/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import GcsCreateResponse, GcsUpdateResponse __all__ = ["GcsCreateResponse", "GcsUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/gcs/client.py b/src/label_studio_sdk/export_storage/gcs/client.py index 708cf7192..e1ac120d2 100644 --- a/src/label_studio_sdk/export_storage/gcs/client.py +++ b/src/label_studio_sdk/export_storage/gcs/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_export_storage import GcsExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawGcsClient, RawGcsClient from .types.gcs_create_response import GcsCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class GcsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawGcsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawGcsClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.export_storage.gcs.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsExportStorage], - parse_obj_as( - type_=typing.List[GcsExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -139,38 +128,18 @@ def create( ) client.export_storage.gcs.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) 
- _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -235,33 +204,19 @@ def validate( ) client.export_storage.gcs.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -294,24 +249,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - 
_response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -343,18 +282,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -424,38 +353,19 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - 
parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -489,29 +399,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncGcsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawGcsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawGcsClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -554,27 +459,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsExportStorage], - parse_obj_as( - type_=typing.List[GcsExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -648,38 +534,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: 
ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -752,33 +618,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -819,24 +671,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - 
object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -876,18 +712,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -965,38 +791,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= 
_response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -1038,21 +845,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/gcs/raw_client.py b/src/label_studio_sdk/export_storage/gcs/raw_client.py new file mode 100644 index 000000000..700e69127 --- /dev/null +++ b/src/label_studio_sdk/export_storage/gcs/raw_client.py @@ -0,0 +1,881 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.gcs_export_storage import GcsExportStorage +from .types.gcs_create_response import GcsCreateResponse +from .types.gcs_update_response import GcsUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawGcsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[GcsExportStorage]]: + """ + + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[GcsExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsCreateResponse]: + """ + + Create a new target storage connection to Google Cloud Storage. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + 
google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsExportStorage]: + """ + + Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[GcsExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. 
Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsExportStorage]: + """ + + Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawGcsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[GcsExportStorage]]: + """ + + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[GcsExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsCreateResponse]: + """ + + Create a new target storage connection to Google Cloud Storage. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
+ + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = 
OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsExportStorage]: + """ + + Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GcsExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. 
Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsExportStorage]: + """ + + Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/gcs/types/__init__.py b/src/label_studio_sdk/export_storage/gcs/types/__init__.py index 832c1ee1c..73d049459 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/__init__.py +++ b/src/label_studio_sdk/export_storage/gcs/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .gcs_create_response import GcsCreateResponse from .gcs_update_response import GcsUpdateResponse diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py index 955c9d0cf..f4c6c63a0 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py index 48f05d0f3..86860ea58 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/local/__init__.py b/src/label_studio_sdk/export_storage/local/__init__.py index 248109b66..44e4524e0 100644 --- a/src/label_studio_sdk/export_storage/local/__init__.py +++ b/src/label_studio_sdk/export_storage/local/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import LocalCreateResponse, LocalUpdateResponse __all__ = ["LocalCreateResponse", "LocalUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/local/client.py b/src/label_studio_sdk/export_storage/local/client.py index b13fc22e0..3201ddb45 100644 --- a/src/label_studio_sdk/export_storage/local/client.py +++ b/src/label_studio_sdk/export_storage/local/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_export_storage import LocalFilesExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawLocalClient, RawLocalClient from .types.local_create_response import LocalCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class LocalClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawLocalClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.export_storage.local.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesExportStorage], - parse_obj_as( - type_=typing.List[LocalFilesExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -131,36 +120,16 @@ def create( ) client.export_storage.local.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -217,31 +186,17 @@ def validate( ) client.export_storage.local.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -274,24 +229,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -323,18 +262,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -396,36 +325,17 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -459,29 +369,24 @@ def sync(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncLocalClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawLocalClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -524,27 +429,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesExportStorage], - parse_obj_as( - type_=typing.List[LocalFilesExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -610,36 +496,16 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -704,31 +570,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -769,24 +621,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -826,18 +662,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -907,36 +733,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -980,21 +787,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/local/raw_client.py b/src/label_studio_sdk/export_storage/local/raw_client.py new file mode 100644 index 000000000..1019d32ec --- /dev/null +++ b/src/label_studio_sdk/export_storage/local/raw_client.py @@ -0,0 +1,821 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.local_files_export_storage import LocalFilesExportStorage +from .types.local_create_response import LocalCreateResponse +from .types.local_update_response import LocalUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawLocalClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[LocalFilesExportStorage]]: + """ + + You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[LocalFilesExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + 
regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalCreateResponse]: + """ + + Create a new target storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[LocalCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. 
If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesExportStorage]: + """ + + Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalFilesExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalUpdateResponse]: + """ + + Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesExportStorage]: + """ + + Sync tasks to an local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalFilesExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawLocalClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[LocalFilesExportStorage]]: + """ + + You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[LocalFilesExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalCreateResponse]: + """ + + Create a new target storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
+ + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesExportStorage]: + """ + + Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[LocalFilesExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalUpdateResponse]: + """ + + Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesExportStorage]: + """ + + Sync tasks to an local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalFilesExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/local/types/__init__.py b/src/label_studio_sdk/export_storage/local/types/__init__.py index 9a12e8745..5f88d9245 100644 --- a/src/label_studio_sdk/export_storage/local/types/__init__.py +++ b/src/label_studio_sdk/export_storage/local/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .local_create_response import LocalCreateResponse from .local_update_response import LocalUpdateResponse diff --git a/src/label_studio_sdk/export_storage/local/types/local_create_response.py b/src/label_studio_sdk/export_storage/local/types/local_create_response.py index 95051747a..4f45cad89 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/local/types/local_update_response.py b/src/label_studio_sdk/export_storage/local/types/local_update_response.py index e5dd8df6c..885189c74 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/raw_client.py b/src/label_studio_sdk/export_storage/raw_client.py new file mode 100644 index 000000000..d843d01dc --- /dev/null +++ b/src/label_studio_sdk/export_storage/raw_client.py @@ -0,0 +1,93 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem + + +class RawExportStorageClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ExportStorageListTypesResponseItem]]: + """ + Retrieve a list of the export storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[ExportStorageListTypesResponseItem]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawExportStorageClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
AsyncHttpResponse[typing.List[ExportStorageListTypesResponseItem]]: + """ + Retrieve a list of the export storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[ExportStorageListTypesResponseItem]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/redis/__init__.py b/src/label_studio_sdk/export_storage/redis/__init__.py index 7f87f18fe..e52cb2ace 100644 --- a/src/label_studio_sdk/export_storage/redis/__init__.py +++ b/src/label_studio_sdk/export_storage/redis/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import RedisCreateResponse, RedisUpdateResponse __all__ = ["RedisCreateResponse", "RedisUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/redis/client.py b/src/label_studio_sdk/export_storage/redis/client.py index f68ef2332..6a2ba5f52 100644 --- a/src/label_studio_sdk/export_storage/redis/client.py +++ b/src/label_studio_sdk/export_storage/redis/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_export_storage import RedisExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawRedisClient, RawRedisClient from .types.redis_create_response import RedisCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class RedisClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawRedisClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.export_storage.redis.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisExportStorage], - parse_obj_as( - type_=typing.List[RedisExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -143,39 +132,19 @@ def create( ) client.export_storage.redis.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="POST", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, 
body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -244,34 +213,20 @@ def validate( ) client.export_storage.redis.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis/validate", - method="POST", - json={ - "id": id, - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -304,24 +259,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, 
request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -353,18 +292,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -438,39 +367,20 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -504,29 +414,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncRedisClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawRedisClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -569,27 +474,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisExportStorage], - parse_obj_as( - type_=typing.List[RedisExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -667,39 +553,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="POST", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -776,34 +642,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis/validate", - method="POST", - json={ - "id": id, - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -844,24 +696,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await 
self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -901,18 +737,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -994,39 +820,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data 
async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -1068,21 +875,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/redis/raw_client.py b/src/label_studio_sdk/export_storage/redis/raw_client.py new file mode 100644 index 000000000..707afb81e --- /dev/null +++ b/src/label_studio_sdk/export_storage/redis/raw_client.py @@ -0,0 +1,911 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.redis_export_storage import RedisExportStorage +from .types.redis_create_response import RedisCreateResponse +from .types.redis_update_response import RedisUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawRedisClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[RedisExportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[RedisExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: 
typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisCreateResponse]: + """ + + Create a new target storage connection to Redis. + + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RedisCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="POST", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis/validate", + method="POST", + json={ + "id": id, + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisExportStorage]: + """ + + Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RedisExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisExportStorage]: + """ + + Sync tasks to an Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawRedisClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[RedisExportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
+ + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[RedisExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisCreateResponse]: + """ + + Create a new target storage connection to Redis. + + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="POST", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + 
project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis/validate", + method="POST", + json={ + "id": id, + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisExportStorage]: + """ + + Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[RedisExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisExportStorage]: + """ + + Sync tasks to an Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/redis/types/__init__.py b/src/label_studio_sdk/export_storage/redis/types/__init__.py index b3557bc08..aea7ed291 100644 --- a/src/label_studio_sdk/export_storage/redis/types/__init__.py +++ b/src/label_studio_sdk/export_storage/redis/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .redis_create_response import RedisCreateResponse from .redis_update_response import RedisUpdateResponse diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py index 7aab4a479..c57c0ace4 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py index 8eec3c821..7787c488a 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3/__init__.py b/src/label_studio_sdk/export_storage/s3/__init__.py index c749fe227..e6421caaf 100644 --- a/src/label_studio_sdk/export_storage/s3/__init__.py +++ b/src/label_studio_sdk/export_storage/s3/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import S3CreateResponse, S3UpdateResponse __all__ = ["S3CreateResponse", "S3UpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/s3/client.py b/src/label_studio_sdk/export_storage/s3/client.py index 7b7331d46..d32dc2b76 100644 --- a/src/label_studio_sdk/export_storage/s3/client.py +++ b/src/label_studio_sdk/export_storage/s3/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3export_storage import S3ExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawS3Client, RawS3Client from .types.s3create_response import S3CreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class S3Client: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawS3Client + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.export_storage.s3.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ExportStorage], - parse_obj_as( - type_=typing.List[S3ExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -155,42 +144,22 @@ def create( ) client.export_storage.s3.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, 
request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -271,37 +240,23 @@ def validate( ) client.export_storage.s3.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ 
-334,24 +289,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -383,18 +322,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -480,42 +409,23 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - 
"aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -549,29 +459,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncS3Client: def __init__(self, *, client_wrapper: AsyncClientWrapper): - 
self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawS3Client + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -614,27 +519,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ExportStorage], - parse_obj_as( - type_=typing.List[S3ExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -724,42 +610,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + 
description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -848,37 +714,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - 
except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -919,24 +771,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -976,18 +812,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1081,42 +907,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - 
"can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -1158,21 +965,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - 
raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/s3/raw_client.py b/src/label_studio_sdk/export_storage/s3/raw_client.py new file mode 100644 index 000000000..ed6014980 --- /dev/null +++ b/src/label_studio_sdk/export_storage/s3/raw_client.py @@ -0,0 +1,999 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.s3export_storage import S3ExportStorage +from .types.s3create_response import S3CreateResponse +from .types.s3update_response import S3UpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawS3Client: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[S3ExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[S3ExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3CreateResponse]: + """ + + Create a new target storage connection to S3 storage. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3CreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise 
ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[S3ExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3ExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3UpdateResponse]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3UpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3ExportStorage]: + """ + + Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3ExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3Client: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3ExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. 
Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[S3ExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3CreateResponse]: + """ + + Create a new target storage connection to S3 storage. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3CreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 export storage connection. 
This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3ExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3UpdateResponse]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3UpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ExportStorage]: + """ + + Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3ExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/s3/types/__init__.py b/src/label_studio_sdk/export_storage/s3/types/__init__.py index 3cc20ce7b..bb333983a 100644 --- a/src/label_studio_sdk/export_storage/s3/types/__init__.py +++ b/src/label_studio_sdk/export_storage/s3/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .s3create_response import S3CreateResponse from .s3update_response import S3UpdateResponse diff --git a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py index cc74a5583..ff883c191 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3CreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py index 466c2b64c..c3ee36914 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3UpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3s/__init__.py b/src/label_studio_sdk/export_storage/s3s/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/export_storage/s3s/__init__.py +++ b/src/label_studio_sdk/export_storage/s3s/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + diff --git a/src/label_studio_sdk/export_storage/s3s/client.py b/src/label_studio_sdk/export_storage/s3s/client.py index 460586a50..6dde0d936 100644 --- a/src/label_studio_sdk/export_storage/s3s/client.py +++ b/src/label_studio_sdk/export_storage/s3s/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3s_export_storage import S3SExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...core.jsonable_encoder import jsonable_encoder -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawS3SClient, RawS3SClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class S3SClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawS3SClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -51,27 +59,8 @@ def list( ) client.export_storage.s3s.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SExportStorage], - parse_obj_as( - type_=typing.List[S3SExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -143,40 +132,20 @@ def create( ) client.export_storage.s3s.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - 
_response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ @@ -207,24 +176,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -254,18 +207,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -341,40 +284,21 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - 
f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -443,39 +367,36 @@ def validate( ) client.export_storage.s3s.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s/validate", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, 
request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncS3SClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawS3SClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -518,27 +439,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SExportStorage], - parse_obj_as( - type_=typing.List[S3SExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -618,40 +520,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": 
bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ @@ -690,24 +572,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -745,18 +611,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -840,40 +696,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -950,31 +787,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - "api/storages/export/s3s/validate", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/export_storage/s3s/raw_client.py b/src/label_studio_sdk/export_storage/s3s/raw_client.py new file mode 100644 index 000000000..c230c2660 --- /dev/null +++ b/src/label_studio_sdk/export_storage/s3s/raw_client.py @@ -0,0 +1,827 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.s3s_export_storage import S3SExportStorage + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawS3SClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[S3SExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[S3SExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + 
prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SExportStorage]: + """ + + Create a new target storage connection to a S3 bucket with IAM role access. + + For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3SExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3SExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3SExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SExportStorage]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: 
typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3SClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3SExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[S3SExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SExportStorage]: + """ + + Create a new target storage connection to a S3 bucket with IAM role access. + + For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3SExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
+ + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SExportStorage]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = 
OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/types/__init__.py b/src/label_studio_sdk/export_storage/types/__init__.py index 58de62c07..1539a9784 100644 --- a/src/label_studio_sdk/export_storage/types/__init__.py +++ b/src/label_studio_sdk/export_storage/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .export_storage_list_types_response_item import ExportStorageListTypesResponseItem __all__ = ["ExportStorageListTypesResponseItem"] diff --git a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py index 684e9172a..50f3659ad 100644 --- a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py +++ b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ExportStorageListTypesResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/files/__init__.py b/src/label_studio_sdk/files/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/files/__init__.py +++ b/src/label_studio_sdk/files/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/files/client.py b/src/label_studio_sdk/files/client.py index a832258c3..9b68c2ce6 100644 --- a/src/label_studio_sdk/files/client.py +++ b/src/label_studio_sdk/files/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.file_upload import FileUpload -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawFilesClient, RawFilesClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class FilesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawFilesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawFilesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawFilesClient + """ + return self._raw_client def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> FileUpload: """ @@ -46,24 +54,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -92,18 +84,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -150,29 +132,8 @@ def update( id_=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id_)}", - method="PATCH", - json={ - "id": id, - "file": file, - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: 
- return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.update(id_, id=id, file=file, request_options=request_options) + return _response.data def list( self, @@ -218,28 +179,8 @@ def list( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="GET", - params={ - "all": all_, - "ids": ids, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[FileUpload], - parse_obj_as( - type_=typing.List[FileUpload], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, all_=all_, ids=ids, request_options=request_options) + return _response.data def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -271,18 +212,8 @@ def delete_many(self, id: int, *, request_options: typing.Optional[RequestOption id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete_many(id, request_options=request_options) + return 
_response.data def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -310,23 +241,24 @@ def download(self, filename: str, *, request_options: typing.Optional[RequestOpt filename="filename", ) """ - _response = self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.download(filename, request_options=request_options) + return _response.data class AsyncFilesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawFilesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawFilesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawFilesClient + """ + return self._raw_client async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> FileUpload: """ @@ -364,24 +296,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -418,18 +334,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -484,29 +390,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id_)}", - method="PATCH", - json={ - "id": id, - "file": file, - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - 
return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.update(id_, id=id, file=file, request_options=request_options) + return _response.data async def list( self, @@ -560,28 +445,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="GET", - params={ - "all": all_, - "ids": ids, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[FileUpload], - parse_obj_as( - type_=typing.List[FileUpload], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(id, all_=all_, ids=ids, request_options=request_options) + return _response.data async def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -621,18 +486,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete_many(id, 
request_options=request_options) + return _response.data async def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -668,15 +523,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.download(filename, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/files/raw_client.py b/src/label_studio_sdk/files/raw_client.py new file mode 100644 index 000000000..756cbf4b8 --- /dev/null +++ b/src/label_studio_sdk/files/raw_client.py @@ -0,0 +1,523 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.file_upload import FileUpload + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawFilesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[FileUpload]: + """ + Retrieve details about a specific uploaded file. To get the file upload ID, use [Get files list](list). 
+ + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[FileUpload] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a specific uploaded file. To get the file upload ID, use [Get files list](list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id_: int, + *, + id: typing.Optional[int] = OMIT, + file: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[FileUpload]: + """ + + Update a specific uploaded file. To get the file upload ID, use [Get files list](list). + + You will need to include the file data in the request body. For example: + ```bash + curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F ‘file=@path/to/my_file.csv’ + ``` + + Parameters + ---------- + id_ : int + A unique integer value identifying this file upload. + + id : typing.Optional[int] + + file : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[FileUpload] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id_)}", + method="PATCH", + json={ + "id": id, + "file": file, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, + id: int, + *, + all_: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[FileUpload]]: + """ + + Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + all_ : typing.Optional[bool] + Set to "true" if you want to retrieve all file uploads + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + Specify the list of file upload IDs to retrieve, e.g. ids=[1,2,3] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[FileUpload]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="GET", + params={ + "all": all_, + "ids": ids, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. + + Parameters + ---------- + filename : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawFilesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[FileUpload]: + """ + Retrieve details about a specific uploaded file. To get the file upload ID, use [Get files list](list). 
+ + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[FileUpload] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a specific uploaded file. To get the file upload ID, use [Get files list](list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id_: int, + *, + id: typing.Optional[int] = OMIT, + file: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[FileUpload]: + """ + + Update a specific uploaded file. To get the file upload ID, use [Get files list](list). + + You will need to include the file data in the request body. For example: + ```bash + curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F ‘file=@path/to/my_file.csv’ + ``` + + Parameters + ---------- + id_ : int + A unique integer value identifying this file upload. + + id : typing.Optional[int] + + file : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[FileUpload] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id_)}", + method="PATCH", + json={ + "id": id, + "file": file, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, + id: int, + *, + all_: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[FileUpload]]: + """ + + Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + all_ : typing.Optional[bool] + Set to "true" if you want to retrieve all file uploads + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + Specify the list of file upload IDs to retrieve, e.g. ids=[1,2,3] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[FileUpload]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="GET", + params={ + "all": all_, + "ids": ids, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete_many( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def download( + self, filename: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. + + Parameters + ---------- + filename : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/__init__.py b/src/label_studio_sdk/import_storage/__init__.py index 51599b165..b2e5e5130 100644 --- a/src/label_studio_sdk/import_storage/__init__.py +++ b/src/label_studio_sdk/import_storage/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import ImportStorageListTypesResponseItem from . import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse diff --git a/src/label_studio_sdk/import_storage/azure/__init__.py b/src/label_studio_sdk/import_storage/azure/__init__.py index 323fc5f3d..97dcea344 100644 --- a/src/label_studio_sdk/import_storage/azure/__init__.py +++ b/src/label_studio_sdk/import_storage/azure/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import AzureCreateResponse, AzureUpdateResponse __all__ = ["AzureCreateResponse", "AzureUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/azure/client.py b/src/label_studio_sdk/import_storage/azure/client.py index 655b8b5d4..2b70c67fe 100644 --- a/src/label_studio_sdk/import_storage/azure/client.py +++ b/src/label_studio_sdk/import_storage/azure/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_import_storage import AzureBlobImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawAzureClient, RawAzureClient from .types.azure_create_response import AzureCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,7 +15,18 @@ class AzureClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawAzureClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.import_storage.azure.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobImportStorage], - parse_obj_as( - type_=typing.List[AzureBlobImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -153,41 +142,21 @@ def create( ) client.import_storage.azure.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + 
presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -264,36 +233,22 @@ def validate( ) client.import_storage.azure.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: 
""" @@ -326,24 +281,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -377,18 +316,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -470,41 +399,22 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": 
account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -539,29 +449,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncAzureClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> 
AsyncRawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawAzureClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -604,27 +509,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobImportStorage], - parse_obj_as( - type_=typing.List[AzureBlobImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -712,41 +598,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, 
request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -831,36 +697,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -901,24 +753,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
f"api/storages/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -960,18 +796,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1061,41 +887,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + 
use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -1138,21 +945,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/azure/raw_client.py b/src/label_studio_sdk/import_storage/azure/raw_client.py new file mode 100644 index 000000000..6ebcfaff5 --- /dev/null +++ b/src/label_studio_sdk/import_storage/azure/raw_client.py @@ -0,0 +1,981 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.azure_blob_import_storage import AzureBlobImportStorage +from .types.azure_create_response import AzureCreateResponse +from .types.azure_update_response import AzureUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAzureClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[AzureBlobImportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[AzureBlobImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureCreateResponse]: + """ + + Create a new source storage connection to Microsoft Azure Blob storage. + + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[AzureCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. 
If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobImportStorage]: + """ + + Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[AzureBlobImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobImportStorage]: + """ + + Sync tasks 
from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureBlobImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawAzureClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[AzureBlobImportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. 
Use this API request to get a list of all Azure import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[AzureBlobImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
AsyncHttpResponse[AzureCreateResponse]: + """ + + Create a new source storage connection to Microsoft Azure Blob storage. + + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AzureCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
+ + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AzureBlobImportStorage]: + """ + + Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AzureBlobImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
AsyncHttpResponse[AzureBlobImportStorage]: + """ + + Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureBlobImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/azure/types/__init__.py b/src/label_studio_sdk/import_storage/azure/types/__init__.py index 0cb2cdbbc..e56fb71c1 100644 --- a/src/label_studio_sdk/import_storage/azure/types/__init__.py +++ b/src/label_studio_sdk/import_storage/azure/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .azure_create_response import AzureCreateResponse from .azure_update_response import AzureUpdateResponse diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py index b59cf0117..b6cd9028b 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py index afacbeb28..207aa6d05 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/client.py b/src/label_studio_sdk/import_storage/client.py index 6ac60efe9..2fada220a 100644 --- a/src/label_studio_sdk/import_storage/client.py +++ b/src/label_studio_sdk/import_storage/client.py @@ -1,36 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.client_wrapper import SyncClientWrapper -from .azure.client import AzureClient -from .gcs.client import GcsClient -from .local.client import LocalClient -from .redis.client import RedisClient -from .s3.client import S3Client -from .s3s.client import S3SClient import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions +from .azure.client import AsyncAzureClient, AzureClient +from .gcs.client import AsyncGcsClient, GcsClient +from .local.client import AsyncLocalClient, LocalClient +from .raw_client import AsyncRawImportStorageClient, RawImportStorageClient +from .redis.client import AsyncRedisClient, RedisClient +from .s3.client import AsyncS3Client, S3Client +from .s3s.client import AsyncS3SClient, S3SClient from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper -from .azure.client import AsyncAzureClient -from .gcs.client import AsyncGcsClient -from .local.client import AsyncLocalClient -from .redis.client import AsyncRedisClient -from .s3.client import AsyncS3Client -from .s3s.client import AsyncS3SClient class ImportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AzureClient(client_wrapper=self._client_wrapper) - self.gcs = GcsClient(client_wrapper=self._client_wrapper) - self.local = LocalClient(client_wrapper=self._client_wrapper) - self.redis = RedisClient(client_wrapper=self._client_wrapper) - self.s3 = S3Client(client_wrapper=self._client_wrapper) - self.s3s = S3SClient(client_wrapper=self._client_wrapper) + self._raw_client = RawImportStorageClient(client_wrapper=client_wrapper) + self.azure = AzureClient(client_wrapper=client_wrapper) + + self.gcs = 
GcsClient(client_wrapper=client_wrapper) + + self.local = LocalClient(client_wrapper=client_wrapper) + + self.redis = RedisClient(client_wrapper=client_wrapper) + + self.s3 = S3Client(client_wrapper=client_wrapper) + + self.s3s = S3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawImportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawImportStorageClient + """ + return self._raw_client def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -57,35 +65,35 @@ def list_types( ) client.import_storage.list_types() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ImportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list_types(request_options=request_options) + return _response.data class AsyncImportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) - self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) - self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) - self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) - self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) - self.s3s = AsyncS3SClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawImportStorageClient(client_wrapper=client_wrapper) + self.azure = 
AsyncAzureClient(client_wrapper=client_wrapper) + + self.gcs = AsyncGcsClient(client_wrapper=client_wrapper) + + self.local = AsyncLocalClient(client_wrapper=client_wrapper) + + self.redis = AsyncRedisClient(client_wrapper=client_wrapper) + + self.s3 = AsyncS3Client(client_wrapper=client_wrapper) + + self.s3s = AsyncS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawImportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawImportStorageClient + """ + return self._raw_client async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -120,21 +128,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ImportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_types(request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/gcs/__init__.py b/src/label_studio_sdk/import_storage/gcs/__init__.py index 2c4b3d376..7054c2af9 100644 --- a/src/label_studio_sdk/import_storage/gcs/__init__.py +++ b/src/label_studio_sdk/import_storage/gcs/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import GcsCreateResponse, GcsUpdateResponse __all__ = ["GcsCreateResponse", "GcsUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/gcs/client.py b/src/label_studio_sdk/import_storage/gcs/client.py index 5262a9ea0..aeca74bdc 100644 --- a/src/label_studio_sdk/import_storage/gcs/client.py +++ b/src/label_studio_sdk/import_storage/gcs/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_import_storage import GcsImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawGcsClient, RawGcsClient from .types.gcs_create_response import GcsCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class GcsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawGcsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawGcsClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.import_storage.gcs.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsImportStorage], - parse_obj_as( - type_=typing.List[GcsImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -153,41 +142,21 @@ def create( ) client.import_storage.gcs.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return 
typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -264,36 +233,22 @@ def validate( ) client.import_storage.gcs.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -326,24 +281,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - 
f"api/storages/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -377,18 +316,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -470,41 +399,22 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + 
use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -539,29 +449,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncGcsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawGcsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawGcsClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -604,27 +509,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsImportStorage], - parse_obj_as( - type_=typing.List[GcsImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -712,41 +598,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= 
_response.status_code < 300: - return typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -831,36 +697,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -901,24 +753,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
f"api/storages/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -960,18 +796,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1061,41 +887,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + 
regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -1138,21 +945,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/gcs/raw_client.py b/src/label_studio_sdk/import_storage/gcs/raw_client.py new file mode 100644 index 000000000..2e153f0f0 --- /dev/null +++ b/src/label_studio_sdk/import_storage/gcs/raw_client.py @@ -0,0 +1,981 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.gcs_import_storage import GcsImportStorage +from .types.gcs_create_response import GcsCreateResponse +from .types.gcs_update_response import GcsUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawGcsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[GcsImportStorage]]: + """ + + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[GcsImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsCreateResponse]: + """ + + Create a new source storage connection to a Google Cloud Storage bucket. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
+ + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[GcsCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
+ + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsImportStorage]: + """ + + Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[GcsImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def 
sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsImportStorage]: + """ + + Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawGcsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[GcsImportStorage]]: + """ + + You can connect your Google Cloud Storage 
bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[GcsImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsCreateResponse]: + """ + + Create a new source storage connection to a Google Cloud Storage bucket. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GcsCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
+ + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsImportStorage]: + """ + + Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GcsImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), 
body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsImportStorage]: + """ + + Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/gcs/types/__init__.py b/src/label_studio_sdk/import_storage/gcs/types/__init__.py index 832c1ee1c..73d049459 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/__init__.py +++ b/src/label_studio_sdk/import_storage/gcs/types/__init__.py @@ -1,5 +1,7 @@ # 
This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .gcs_create_response import GcsCreateResponse from .gcs_update_response import GcsUpdateResponse diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py index 58c05a731..7950f54b0 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py index 54c7e415c..96e92949b 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/local/__init__.py b/src/label_studio_sdk/import_storage/local/__init__.py index 248109b66..44e4524e0 100644 --- a/src/label_studio_sdk/import_storage/local/__init__.py +++ b/src/label_studio_sdk/import_storage/local/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import LocalCreateResponse, LocalUpdateResponse __all__ = ["LocalCreateResponse", "LocalUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/local/client.py b/src/label_studio_sdk/import_storage/local/client.py index ad230f7ed..08bd1b9ba 100644 --- a/src/label_studio_sdk/import_storage/local/client.py +++ b/src/label_studio_sdk/import_storage/local/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_import_storage import LocalFilesImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawLocalClient, RawLocalClient from .types.local_create_response import LocalCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class LocalClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawLocalClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.import_storage.local.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesImportStorage], - parse_obj_as( - type_=typing.List[LocalFilesImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -131,36 +120,16 @@ def create( ) client.import_storage.local.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + return _response.data def validate( self, @@ -217,31 +186,17 @@ def validate( ) client.import_storage.local.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -274,24 +229,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> 
None: """ @@ -325,18 +264,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -398,36 +327,17 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -462,29 +372,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = 
self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncLocalClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawLocalClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -527,27 +432,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesImportStorage], - parse_obj_as( - type_=typing.List[LocalFilesImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -613,36 +499,16 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -707,31 +573,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -772,24 +624,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> None: """ @@ -831,18 +667,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -912,36 +738,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -986,21 +793,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
f"api/storages/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/local/raw_client.py b/src/label_studio_sdk/import_storage/local/raw_client.py new file mode 100644 index 000000000..e631df782 --- /dev/null +++ b/src/label_studio_sdk/import_storage/local/raw_client.py @@ -0,0 +1,827 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.local_files_import_storage import LocalFilesImportStorage +from .types.local_create_response import LocalCreateResponse +from .types.local_update_response import LocalUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawLocalClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[LocalFilesImportStorage]]: + """ + + If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[LocalFilesImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalCreateResponse]: + """ + + Create a new source storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
+ + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesImportStorage]: + """ + + Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[LocalFilesImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalUpdateResponse]: + """ + + Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesImportStorage]: + """ + + Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[LocalFilesImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawLocalClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[LocalFilesImportStorage]]: + """ + + If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[LocalFilesImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalCreateResponse]: + """ + + Create a new source storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
+ + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesImportStorage]: + """ + + Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[LocalFilesImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalUpdateResponse]: + """ + + Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesImportStorage]: + """ + + Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalFilesImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/local/types/__init__.py b/src/label_studio_sdk/import_storage/local/types/__init__.py index 9a12e8745..5f88d9245 100644 --- a/src/label_studio_sdk/import_storage/local/types/__init__.py +++ b/src/label_studio_sdk/import_storage/local/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .local_create_response import LocalCreateResponse from .local_update_response import LocalUpdateResponse diff --git a/src/label_studio_sdk/import_storage/local/types/local_create_response.py b/src/label_studio_sdk/import_storage/local/types/local_create_response.py index 95051747a..4f45cad89 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/local/types/local_update_response.py b/src/label_studio_sdk/import_storage/local/types/local_update_response.py index e5dd8df6c..885189c74 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/raw_client.py b/src/label_studio_sdk/import_storage/raw_client.py new file mode 100644 index 000000000..71f3ffbe2 --- /dev/null +++ b/src/label_studio_sdk/import_storage/raw_client.py @@ -0,0 +1,93 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem + + +class RawImportStorageClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ImportStorageListTypesResponseItem]]: + """ + Retrieve a list of the import storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[ImportStorageListTypesResponseItem]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawImportStorageClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
AsyncHttpResponse[typing.List[ImportStorageListTypesResponseItem]]: + """ + Retrieve a list of the import storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[ImportStorageListTypesResponseItem]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/redis/__init__.py b/src/label_studio_sdk/import_storage/redis/__init__.py index 7f87f18fe..e52cb2ace 100644 --- a/src/label_studio_sdk/import_storage/redis/__init__.py +++ b/src/label_studio_sdk/import_storage/redis/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import RedisCreateResponse, RedisUpdateResponse __all__ = ["RedisCreateResponse", "RedisUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/redis/client.py b/src/label_studio_sdk/import_storage/redis/client.py index dd9d6225a..fa8ac2684 100644 --- a/src/label_studio_sdk/import_storage/redis/client.py +++ b/src/label_studio_sdk/import_storage/redis/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_import_storage import RedisImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawRedisClient, RawRedisClient from .types.redis_create_response import RedisCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class RedisClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawRedisClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.import_storage.redis.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisImportStorage], - parse_obj_as( - type_=typing.List[RedisImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -143,39 +132,19 @@ def create( ) client.import_storage.redis.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -244,34 +213,20 @@ def validate( ) client.import_storage.redis.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -304,24 +259,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -355,18 +294,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -440,39 +369,20 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + 
return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -507,29 +417,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncRedisClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawRedisClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -572,27 +477,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisImportStorage], - parse_obj_as( - type_=typing.List[RedisImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -670,39 +556,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -779,34 +645,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -847,24 +699,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = 
await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -906,18 +742,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -999,39 +825,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return 
_response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -1074,21 +881,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/redis/raw_client.py b/src/label_studio_sdk/import_storage/redis/raw_client.py new file mode 100644 index 000000000..2411795ad --- /dev/null +++ b/src/label_studio_sdk/import_storage/redis/raw_client.py @@ -0,0 +1,917 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.redis_import_storage import RedisImportStorage +from .types.redis_create_response import RedisCreateResponse +from .types.redis_update_response import RedisUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawRedisClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[RedisImportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[RedisImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: 
typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisCreateResponse]: + """ + + Create a new source storage connection to a Redis database. + + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RedisCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. 
+ + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisImportStorage]: + """ + + Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). 
+ + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. 
+ + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisImportStorage]: + """ + + Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. 
You can find this using [List import storages](list). + + Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawRedisClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[RedisImportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. 
+ + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[RedisImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisCreateResponse]: + """ + + Create a new source storage connection to a Redis database. 
+ + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[RedisCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. 
You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisImportStorage]: + """ + + Get a specific Redis import storage connection. You will need to provide the import storage ID. 
You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. 
+ + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. 
+ + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: 
int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisImportStorage]: + """ + + Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/redis/types/__init__.py b/src/label_studio_sdk/import_storage/redis/types/__init__.py index b3557bc08..aea7ed291 100644 --- a/src/label_studio_sdk/import_storage/redis/types/__init__.py +++ b/src/label_studio_sdk/import_storage/redis/types/__init__.py @@ -1,5 +1,7 @@ # This file was 
auto-generated by Fern from our API Definition. +# isort: skip_file + from .redis_create_response import RedisCreateResponse from .redis_update_response import RedisUpdateResponse diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py index fa8fba715..c2dda07b6 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py index 247ff9057..6d9310ee1 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3/__init__.py b/src/label_studio_sdk/import_storage/s3/__init__.py index c749fe227..e6421caaf 100644 --- a/src/label_studio_sdk/import_storage/s3/__init__.py +++ b/src/label_studio_sdk/import_storage/s3/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import S3CreateResponse, S3UpdateResponse __all__ = ["S3CreateResponse", "S3UpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/s3/client.py b/src/label_studio_sdk/import_storage/s3/client.py index 227044611..6219d680c 100644 --- a/src/label_studio_sdk/import_storage/s3/client.py +++ b/src/label_studio_sdk/import_storage/s3/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3import_storage import S3ImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawS3Client, RawS3Client from .types.s3create_response import S3CreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class S3Client: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawS3Client + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -53,27 +61,8 @@ def list( ) client.import_storage.s3.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ImportStorage], - parse_obj_as( - type_=typing.List[S3ImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -173,46 +162,26 @@ def create( ) client.import_storage.s3.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + 
aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -309,41 +278,27 @@ def validate( ) client.import_storage.s3.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, 
- omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -376,24 +331,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -427,18 +366,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -540,46 +469,27 @@ def update( id=1, ) """ - _response = 
self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -614,29 +524,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", - method="POST", - 
request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncS3Client: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawS3Client + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -679,27 +584,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ImportStorage], - parse_obj_as( - type_=typing.List[S3ImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -807,46 +693,26 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
"api/storages/s3/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -951,41 +817,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, 
- "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -1026,24 +878,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, 
request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -1085,18 +921,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1206,46 +1032,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + 
aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -1288,21 +1095,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/s3/raw_client.py b/src/label_studio_sdk/import_storage/s3/raw_client.py new file mode 100644 index 000000000..9604ce40d --- /dev/null +++ b/src/label_studio_sdk/import_storage/s3/raw_client.py @@ -0,0 +1,1129 @@ +# This file was auto-generated by Fern from our API Definition. 
+
+import typing
+from json.decoder import JSONDecodeError
+
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ...core.http_response import AsyncHttpResponse, HttpResponse
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.pydantic_utilities import parse_obj_as
+from ...core.request_options import RequestOptions
+from ...types.s3import_storage import S3ImportStorage
+from .types.s3create_response import S3CreateResponse
+from .types.s3update_response import S3UpdateResponse
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class RawS3Client:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def list(
+        self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None
+    ) -> HttpResponse[typing.List[S3ImportStorage]]:
+        """
+
+        You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project.
+
+        The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).
+
+        For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage).
+
+        Parameters
+        ----------
+        project : typing.Optional[int]
+            Project ID
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+ + Returns + ------- + HttpResponse[typing.List[S3ImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3CreateResponse]: + """ + + Create a new source storage connection to a S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. 
+ + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3CreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + 
region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[S3ImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3ImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3UpdateResponse]: + """ + + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. 
+ + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3UpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3ImportStorage]: + """ + + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3ImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3Client: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3ImportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[S3ImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3CreateResponse]: + """ + + Create a new source storage connection to a S3 bucket. 
+ + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3CreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: 
typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3ImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3UpdateResponse]: + """ + + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. 
You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3UpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ImportStorage]: + """ + + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3ImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/s3/types/__init__.py b/src/label_studio_sdk/import_storage/s3/types/__init__.py index 3cc20ce7b..bb333983a 100644 --- a/src/label_studio_sdk/import_storage/s3/types/__init__.py +++ b/src/label_studio_sdk/import_storage/s3/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .s3create_response import S3CreateResponse from .s3update_response import S3UpdateResponse diff --git a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py index 86b6e5fd3..ac8bed298 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3CreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py index 3ed56596d..95d6fe001 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3UpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3s/__init__.py b/src/label_studio_sdk/import_storage/s3s/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/import_storage/s3s/__init__.py +++ b/src/label_studio_sdk/import_storage/s3s/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/import_storage/s3s/client.py b/src/label_studio_sdk/import_storage/s3s/client.py index eca17a326..4f39746fe 100644 --- a/src/label_studio_sdk/import_storage/s3s/client.py +++ b/src/label_studio_sdk/import_storage/s3s/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3s_import_storage import S3SImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...core.jsonable_encoder import jsonable_encoder -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawS3SClient, RawS3SClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class S3SClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawS3SClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -51,27 +59,8 @@ def list( ) client.import_storage.s3s.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SImportStorage], - parse_obj_as( - type_=typing.List[S3SImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -163,44 +152,24 @@ def create( ) 
client.import_storage.s3s.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -231,24 +200,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json 
= _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -282,18 +235,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -387,44 +330,25 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + 
region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -509,38 +433,24 @@ def validate( ) client.import_storage.s3s.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s/validate", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -571,29 
+481,24 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncS3SClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawS3SClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -636,27 +541,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SImportStorage], - parse_obj_as( - type_=typing.List[S3SImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -756,44 +642,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, 
request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -832,24 +698,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -891,18 +741,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + 
return _response.data async def update( self, @@ -1004,44 +844,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -1134,38 +955,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s/validate", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": 
bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -1204,21 +1011,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/s3s/raw_client.py b/src/label_studio_sdk/import_storage/s3s/raw_client.py new file mode 100644 index 000000000..52f3367ae --- /dev/null +++ 
b/src/label_studio_sdk/import_storage/s3s/raw_client.py @@ -0,0 +1,1047 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.s3s_import_storage import S3SImportStorage + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawS3SClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[S3SImportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[S3SImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SImportStorage]: + """ + + Create a new source storage connection to a S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3SImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SImportStorage]: + """ + + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + Import storage ID + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3SImportStorage]: + """ + + Sync tasks from an S3 import storage connection. 
You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3SClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3SImportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[S3SImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Create a new source storage connection to a S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. 
+ + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + Import storage ID + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Sync tasks from an S3 import storage connection. 
You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/types/__init__.py b/src/label_studio_sdk/import_storage/types/__init__.py index f995a3c6d..f82663649 100644 --- a/src/label_studio_sdk/import_storage/types/__init__.py +++ b/src/label_studio_sdk/import_storage/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .import_storage_list_types_response_item import ImportStorageListTypesResponseItem __all__ = ["ImportStorageListTypesResponseItem"] diff --git a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py index 21112358a..3247ea665 100644 --- a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py +++ b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ImportStorageListTypesResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/jwt_settings/__init__.py b/src/label_studio_sdk/jwt_settings/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/jwt_settings/__init__.py +++ b/src/label_studio_sdk/jwt_settings/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/jwt_settings/client.py b/src/label_studio_sdk/jwt_settings/client.py index 338513a6c..653bddc46 100644 --- a/src/label_studio_sdk/jwt_settings/client.py +++ b/src/label_studio_sdk/jwt_settings/client.py @@ -1,13 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.jwt_settings_response import JwtSettingsResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawJwtSettingsClient, RawJwtSettingsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,7 +13,18 @@ class JwtSettingsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawJwtSettingsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawJwtSettingsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawJwtSettingsClient + """ + return self._raw_client def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> JwtSettingsResponse: """ @@ -40,24 +49,8 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Jwt ) client.jwt_settings.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(request_options=request_options) + return _response.data def create( self, @@ -102,35 +95,29 @@ def create( api_token_ttl_days=1, ) """ - _response = self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="POST", - json={ - "api_tokens_enabled": api_tokens_enabled, - "legacy_api_tokens_enabled": legacy_api_tokens_enabled, - "api_token_ttl_days": api_token_ttl_days, - }, + _response = self._raw_client.create( + api_tokens_enabled=api_tokens_enabled, + legacy_api_tokens_enabled=legacy_api_tokens_enabled, + api_token_ttl_days=api_token_ttl_days, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncJwtSettingsClient: def __init__(self, *, client_wrapper: 
AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawJwtSettingsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawJwtSettingsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawJwtSettingsClient + """ + return self._raw_client async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> JwtSettingsResponse: """ @@ -163,24 +150,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(request_options=request_options) + return _response.data async def create( self, @@ -233,27 +204,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="POST", - json={ - "api_tokens_enabled": api_tokens_enabled, - "legacy_api_tokens_enabled": legacy_api_tokens_enabled, - "api_token_ttl_days": api_token_ttl_days, - }, + _response = await self._raw_client.create( + api_tokens_enabled=api_tokens_enabled, + legacy_api_tokens_enabled=legacy_api_tokens_enabled, + api_token_ttl_days=api_token_ttl_days, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = 
_response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/jwt_settings/raw_client.py b/src/label_studio_sdk/jwt_settings/raw_client.py new file mode 100644 index 000000000..33cf55a1b --- /dev/null +++ b/src/label_studio_sdk/jwt_settings/raw_client.py @@ -0,0 +1,212 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.jwt_settings_response import JwtSettingsResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawJwtSettingsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[JwtSettingsResponse]: + """ + Retrieve JWT settings for the currently-active organization. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[JwtSettingsResponse] + JWT settings retrieved successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + api_tokens_enabled: bool, + legacy_api_tokens_enabled: bool, + api_token_ttl_days: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[JwtSettingsResponse]: + """ + Update JWT settings for the currently active organization. + + Parameters + ---------- + api_tokens_enabled : bool + Whether JWT API tokens are enabled + + legacy_api_tokens_enabled : bool + Whether legacy API tokens are enabled + + api_token_ttl_days : int + Number of days before API tokens expire + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[JwtSettingsResponse] + JWT settings updated successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="POST", + json={ + "api_tokens_enabled": api_tokens_enabled, + "legacy_api_tokens_enabled": legacy_api_tokens_enabled, + "api_token_ttl_days": api_token_ttl_days, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawJwtSettingsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[JwtSettingsResponse]: + """ + Retrieve JWT settings for the currently-active organization. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[JwtSettingsResponse] + JWT settings retrieved successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + api_tokens_enabled: bool, + legacy_api_tokens_enabled: bool, + api_token_ttl_days: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[JwtSettingsResponse]: + """ + Update JWT settings for the currently active organization. + + Parameters + ---------- + api_tokens_enabled : bool + Whether JWT API tokens are enabled + + legacy_api_tokens_enabled : bool + Whether legacy API tokens are enabled + + api_token_ttl_days : int + Number of days before API tokens expire + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[JwtSettingsResponse] + JWT settings updated successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="POST", + json={ + "api_tokens_enabled": api_tokens_enabled, + "legacy_api_tokens_enabled": legacy_api_tokens_enabled, + "api_token_ttl_days": api_token_ttl_days, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/ml/__init__.py b/src/label_studio_sdk/ml/__init__.py index e0f97600c..613f98967 100644 --- a/src/label_studio_sdk/ml/__init__.py +++ b/src/label_studio_sdk/ml/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( MlCreateRequestAuthMethod, MlCreateResponse, diff --git a/src/label_studio_sdk/ml/client.py b/src/label_studio_sdk/ml/client.py index 6f635314a..2284a86d3 100644 --- a/src/label_studio_sdk/ml/client.py +++ b/src/label_studio_sdk/ml/client.py @@ -1,19 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.ml_backend import MlBackend -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from .raw_client import AsyncRawMlClient, RawMlClient from .types.ml_create_request_auth_method import MlCreateRequestAuthMethod from .types.ml_create_response import MlCreateResponse -from ..core.jsonable_encoder import jsonable_encoder from .types.ml_update_request_auth_method import MlUpdateRequestAuthMethod from .types.ml_update_response import MlUpdateResponse -from ..errors.internal_server_error import InternalServerError -from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -21,7 +17,18 @@ class MlClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawMlClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawMlClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawMlClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -55,27 +62,8 @@ def list( ) client.ml.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/ml/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MlBackend], - parse_obj_as( - type_=typing.List[MlBackend], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -149,40 +137,20 @@ def create( ) client.ml.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/ml/", - method="POST", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlCreateResponse, - parse_obj_as( - type_=MlCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ @@ -215,24 +183,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlBackend, - parse_obj_as( - type_=MlBackend, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -264,18 +216,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -353,40 +295,21 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="PATCH", - json={ - "url": url, - "project": project, - "is_interactive": 
is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlUpdateResponse, - parse_obj_as( - type_=MlUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def predict_interactive( self, @@ -436,26 +359,10 @@ def predict_interactive( task=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/interactive-annotating", - method="POST", - json={ - "task": task, - "context": context, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.predict_interactive( + id, task=task, context=context, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def train( self, @@ -498,35 +405,8 @@ def train( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/train", - 
method="POST", - json={ - "use_ground_truth": use_ground_truth, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 500: - raise InternalServerError( - typing.cast( - str, - parse_obj_as( - type_=str, # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.train(id, use_ground_truth=use_ground_truth, request_options=request_options) + return _response.data def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -555,23 +435,24 @@ def list_model_versions(self, id: str, *, request_options: typing.Optional[Reque id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list_model_versions(id, request_options=request_options) + return _response.data class AsyncMlClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawMlClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawMlClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawMlClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -613,27 +494,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/ml/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MlBackend], - parse_obj_as( - type_=typing.List[MlBackend], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -715,40 +577,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/ml/", - method="POST", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlCreateResponse, - parse_obj_as( - type_=MlCreateResponse, # type: ignore - 
object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ @@ -789,24 +631,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlBackend, - parse_obj_as( - type_=MlBackend, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -846,18 +672,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -943,40 +759,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="PATCH", - 
json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlUpdateResponse, - parse_obj_as( - type_=MlUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def predict_interactive( self, @@ -1034,26 +831,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/interactive-annotating", - method="POST", - json={ - "task": task, - "context": context, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.predict_interactive( + id, task=task, context=context, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def train( self, @@ -1104,35 +885,8 @@ async def main() -> 
None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/train", - method="POST", - json={ - "use_ground_truth": use_ground_truth, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 500: - raise InternalServerError( - typing.cast( - str, - parse_obj_as( - type_=str, # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.train(id, use_ground_truth=use_ground_truth, request_options=request_options) + return _response.data async def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -1169,15 +923,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_model_versions(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/ml/raw_client.py b/src/label_studio_sdk/ml/raw_client.py new file mode 100644 index 000000000..30ebeee47 --- /dev/null +++ b/src/label_studio_sdk/ml/raw_client.py @@ -0,0 +1,968 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.internal_server_error import InternalServerError +from ..types.ml_backend import MlBackend +from .types.ml_create_request_auth_method import MlCreateRequestAuthMethod +from .types.ml_create_response import MlCreateResponse +from .types.ml_update_request_auth_method import MlUpdateRequestAuthMethod +from .types.ml_update_response import MlUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawMlClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[MlBackend]]: + """ + + List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). + + + You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[MlBackend]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MlCreateResponse]: + """ + + Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). + + If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). + + If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. 
See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). + + Parameters + ---------- + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlCreateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MlCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/ml/", + method="POST", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] 
= None) -> HttpResponse[MlBackend]: + """ + + Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MlBackend] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MlUpdateResponse]: + """ + + Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. 
+ + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlUpdateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MlUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="PATCH", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def predict_interactive( + self, + id: int, + *, + task: int, + context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) 
-> HttpResponse[None]: + """ + + Enable interactive pre-annotations for a specific task. + + ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). + + Before you can use interactive annotations, it must be enabled for you ML backend connection (`"is_interactive": true`). + + You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + task : int + ID of task to annotate + + context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Context for ML model + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/interactive-annotating", + method="POST", + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def train( + self, + id: int, + *, + use_ground_truth: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. + + For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). + + You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + use_ground_truth : typing.Optional[bool] + Whether to include ground truth annotations in training + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/train", + method="POST", + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list_model_versions( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawMlClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[MlBackend]]: + """ + + List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). + + + You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[MlBackend]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MlCreateResponse]: + """ + + Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). + + If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). 
+ + If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). + + Parameters + ---------- + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlCreateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MlCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/ml/", + method="POST", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[MlBackend]: + """ + + Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MlBackend] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MlUpdateResponse]: + """ + + Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. 
+ + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlUpdateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[MlUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="PATCH", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def predict_interactive( + self, + id: int, + *, + task: int, + context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Enable interactive pre-annotations for a specific task. + + ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). + + Before you can use interactive annotations, it must be enabled for you ML backend connection (`"is_interactive": true`). + + You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + task : int + ID of task to annotate + + context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Context for ML model + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/interactive-annotating", + method="POST", + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def train( + self, + id: int, + *, + use_ground_truth: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. + + For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). + + You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + use_ground_truth : typing.Optional[bool] + Whether to include ground truth annotations in training + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/train", + method="POST", + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list_model_versions( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/ml/types/__init__.py b/src/label_studio_sdk/ml/types/__init__.py index b308dc9d2..06b8b419d 100644 --- a/src/label_studio_sdk/ml/types/__init__.py +++ b/src/label_studio_sdk/ml/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .ml_create_request_auth_method import MlCreateRequestAuthMethod from .ml_create_response import MlCreateResponse from .ml_create_response_auth_method import MlCreateResponseAuthMethod diff --git a/src/label_studio_sdk/ml/types/ml_create_response.py b/src/label_studio_sdk/ml/types/ml_create_response.py index 030fa3d3c..abb00160f 100644 --- a/src/label_studio_sdk/ml/types/ml_create_response.py +++ b/src/label_studio_sdk/ml/types/ml_create_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_create_response_auth_method import MlCreateResponseAuthMethod -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class MlCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/ml/types/ml_update_response.py b/src/label_studio_sdk/ml/types/ml_update_response.py index f23e5dadd..5220c5bce 100644 --- a/src/label_studio_sdk/ml/types/ml_update_response.py +++ b/src/label_studio_sdk/ml/types/ml_update_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_update_response_auth_method import MlUpdateResponseAuthMethod -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class MlUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/model_providers/__init__.py b/src/label_studio_sdk/model_providers/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/model_providers/__init__.py +++ b/src/label_studio_sdk/model_providers/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/model_providers/client.py b/src/label_studio_sdk/model_providers/client.py index 92910f2e5..c285b4e77 100644 --- a/src/label_studio_sdk/model_providers/client.py +++ b/src/label_studio_sdk/model_providers/client.py @@ -1,21 +1,17 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.model_provider_connection import ModelProviderConnection -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod +from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization from ..types.model_provider_connection_provider import ModelProviderConnectionProvider from ..types.model_provider_connection_scope import ModelProviderConnectionScope -from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization -from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy -import datetime as dt -from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawModelProvidersClient, RawModelProvidersClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,7 +19,18 @@ class ModelProvidersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawModelProvidersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawModelProvidersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawModelProvidersClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ModelProviderConnection]: """ @@ -48,24 +55,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.model_providers.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ModelProviderConnection], - parse_obj_as( - type_=typing.List[ModelProviderConnection], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -147,46 +138,25 @@ def create( provider="OpenAI", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="POST", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = self._raw_client.create( + 
provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + budget_total_spent=budget_total_spent, + budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> ModelProviderConnection: """ @@ -216,24 +186,8 @@ def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = Non pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(pk, request_options=request_options) + return _response.data def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -262,18 +216,8 @@ def delete(self, pk: int, *, request_options: 
typing.Optional[RequestOptions] = pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(pk, request_options=request_options) + return _response.data def update( self, @@ -360,51 +304,42 @@ def update( provider="OpenAI", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="PATCH", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = self._raw_client.update( + pk, + provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + budget_total_spent=budget_total_spent, 
+ budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncModelProvidersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawModelProvidersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawModelProvidersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawModelProvidersClient + """ + return self._raw_client async def list( self, *, request_options: typing.Optional[RequestOptions] = None @@ -439,24 +374,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ModelProviderConnection], - parse_obj_as( - type_=typing.List[ModelProviderConnection], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -546,46 +465,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
"api/model-provider-connections/", - method="POST", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = await self._raw_client.create( + provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + budget_total_spent=budget_total_spent, + budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> ModelProviderConnection: """ @@ -623,24 +521,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(pk, request_options=request_options) + return _response.data async def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -677,18 +559,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(pk, request_options=request_options) + return _response.data async def update( self, @@ -783,43 +655,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="PATCH", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, 
annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = await self._raw_client.update( + pk, + provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + budget_total_spent=budget_total_spent, + budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/model_providers/raw_client.py b/src/label_studio_sdk/model_providers/raw_client.py new file mode 100644 index 000000000..e481717d1 --- /dev/null +++ b/src/label_studio_sdk/model_providers/raw_client.py @@ -0,0 +1,706 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.model_provider_connection import ModelProviderConnection +from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod +from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization +from ..types.model_provider_connection_provider import ModelProviderConnectionProvider +from ..types.model_provider_connection_scope import ModelProviderConnectionScope + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawModelProvidersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ModelProviderConnection]]: + """ + Get all model provider connections created by the user in the current organization. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[ModelProviderConnection]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ModelProviderConnection]: + """ + Create a new model provider connection. 
+ + Parameters + ---------- + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ModelProviderConnection] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ModelProviderConnection]: + """ + Get a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ModelProviderConnection] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + pk: int, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ModelProviderConnection]: + """ + Update a model provider connection by ID. 
+ + Parameters + ---------- + pk : int + Model Provider Connection ID + + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ModelProviderConnection] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="PATCH", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawModelProvidersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[ModelProviderConnection]]: + """ + Get all model provider connections created by the user in the current organization. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[ModelProviderConnection]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ModelProviderConnection]: + """ + Create a new model provider connection. 
+ + Parameters + ---------- + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ModelProviderConnection] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ModelProviderConnection]: + """ + Get a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ModelProviderConnection] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + pk: int, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ModelProviderConnection]: + """ + Update a model provider connection by ID. 
+ + Parameters + ---------- + pk : int + Model Provider Connection ID + + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ModelProviderConnection] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="PATCH", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/predictions/__init__.py b/src/label_studio_sdk/predictions/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/predictions/__init__.py +++ b/src/label_studio_sdk/predictions/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + diff --git a/src/label_studio_sdk/predictions/client.py b/src/label_studio_sdk/predictions/client.py index f1f84734d..f8f94ee1d 100644 --- a/src/label_studio_sdk/predictions/client.py +++ b/src/label_studio_sdk/predictions/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.prediction import Prediction -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawPredictionsClient, RawPredictionsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class PredictionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawPredictionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPredictionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawPredictionsClient + """ + return self._raw_client def list( self, @@ -60,28 +68,8 @@ def list( ) client.predictions.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/predictions/", - method="GET", - params={ - "task": task, - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prediction], - parse_obj_as( - type_=typing.List[Prediction], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(task=task, project=project, request_options=request_options) + return _response.data def create( self, @@ -157,34 +145,10 @@ def create( model_version="yolo-v8", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/predictions/", - method="POST", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ @@ -217,24 +181,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = 
Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -264,18 +212,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -349,39 +287,26 @@ def update( model_version="yolo-v8", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update( + id, task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return 
typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPredictionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawPredictionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawPredictionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawPredictionsClient + """ + return self._raw_client async def list( self, @@ -433,28 +358,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/predictions/", - method="GET", - params={ - "task": task, - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prediction], - parse_obj_as( - type_=typing.List[Prediction], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(task=task, project=project, request_options=request_options) + return _response.data async def create( self, @@ -538,34 +443,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/predictions/", - method="POST", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - 
request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ @@ -606,24 +487,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -661,18 +526,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -754,31 +609,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update( + id, task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/predictions/raw_client.py b/src/label_studio_sdk/predictions/raw_client.py new file mode 100644 index 000000000..7fc9c7107 --- /dev/null +++ b/src/label_studio_sdk/predictions/raw_client.py @@ -0,0 +1,573 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.prediction import Prediction + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawPredictionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + task: typing.Optional[int] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[Prediction]]: + """ + + Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). + + The terms "predictions" and pre-annotations" are used interchangeably. + + Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). + + To import predictions via the API, see [Create prediction](create). + + Parameters + ---------- + task : typing.Optional[int] + Filter predictions by task ID + + project : typing.Optional[int] + Filter predictions by project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[Prediction]] + Predictions list + """ + _response = self._client_wrapper.httpx_client.request( + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prediction]: + """ + + If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. + + To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. + + #### JSON format for predictions + Label Studio JSON format for pre-annotations must contain two sections: + * A data object which references the source of the data that the pre-annotations apply to. 
This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. + * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + + For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) + + Parameters + ---------- + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. + + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Prediction] + Created prediction + """ + _response = self._client_wrapper.httpx_client.request( + "api/predictions/", + method="POST", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Prediction]: + """ + + Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Prediction] + Prediction details + """ + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a prediction. To find the prediction ID, use [List predictions](list). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prediction]: + """ + + Update a prediction. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. 
+ + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prediction] + Updated prediction + """ + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawPredictionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + task: typing.Optional[int] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[Prediction]]: + """ + + Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). + + The terms "predictions" and pre-annotations" are used interchangeably. 
+ + Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). + + To import predictions via the API, see [Create prediction](create). + + Parameters + ---------- + task : typing.Optional[int] + Filter predictions by task ID + + project : typing.Optional[int] + Filter predictions by project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[Prediction]] + Predictions list + """ + _response = await self._client_wrapper.httpx_client.request( + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prediction]: + """ + + If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. 
+ + To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. + + #### JSON format for predictions + Label Studio JSON format for pre-annotations must contain two sections: + * A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. + * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + + For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) + + Parameters + ---------- + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. + + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Prediction] + Created prediction + """ + _response = await self._client_wrapper.httpx_client.request( + "api/predictions/", + method="POST", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Prediction]: + """ + + Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Prediction] + Prediction details + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a prediction. To find the prediction ID, use [List predictions](list). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prediction]: + """ + + Update a prediction. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. 
+ + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prediction] + Updated prediction + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/__init__.py b/src/label_studio_sdk/projects/__init__.py index 024a38ecc..1cdd91f96 100644 --- a/src/label_studio_sdk/projects/__init__.py +++ b/src/label_studio_sdk/projects/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ProjectsCreateResponse, ProjectsImportTasksResponse, ProjectsListResponse, ProjectsUpdateResponse from . 
import exports, pauses from .exports import ExportsConvertResponse, ExportsListFormatsResponseItem diff --git a/src/label_studio_sdk/projects/client.py b/src/label_studio_sdk/projects/client.py index 70b5df94d..5a1bf9e7d 100644 --- a/src/label_studio_sdk/projects/client.py +++ b/src/label_studio_sdk/projects/client.py @@ -1,26 +1,18 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper -from .pauses.client import PausesClient -from .exports.client import ExportsClient + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.pagination import AsyncPager, SyncPager from ..core.request_options import RequestOptions -from ..core.pagination import SyncPager from ..types.project import Project -from .types.projects_list_response import ProjectsListResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from ..types.project_label_config import ProjectLabelConfig +from .exports.client import AsyncExportsClient, ExportsClient +from .pauses.client import AsyncPausesClient, PausesClient +from .raw_client import AsyncRawProjectsClient, RawProjectsClient from .types.projects_create_response import ProjectsCreateResponse -from ..core.jsonable_encoder import jsonable_encoder -from .types.projects_update_response import ProjectsUpdateResponse from .types.projects_import_tasks_response import ProjectsImportTasksResponse -from ..errors.bad_request_error import BadRequestError -from ..types.project_label_config import ProjectLabelConfig -from ..core.client_wrapper import AsyncClientWrapper -from .pauses.client import AsyncPausesClient -from .exports.client import AsyncExportsClient -from ..core.pagination import AsyncPager +from .types.projects_update_response import ProjectsUpdateResponse # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -28,9 +20,21 @@ class ProjectsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.pauses = PausesClient(client_wrapper=self._client_wrapper) - self.exports = ExportsClient(client_wrapper=self._client_wrapper) + self._raw_client = RawProjectsClient(client_wrapper=client_wrapper) + self.pauses = PausesClient(client_wrapper=client_wrapper) + + self.exports = ExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawProjectsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawProjectsClient + """ + return self._raw_client def list( self, @@ -97,45 +101,15 @@ def list( for page in response.iter_pages(): yield page """ - page = page if page is not None else 1 - _response = self._client_wrapper.httpx_client.request( - "api/projects/", - method="GET", - params={ - "ordering": ordering, - "ids": ids, - "title": title, - "page": page, - "page_size": page_size, - "workspaces": workspaces, - }, + return self._raw_client.list( + ordering=ordering, + ids=ids, + title=title, + page=page, + page_size=page_size, + workspaces=workspaces, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - ProjectsListResponse, - parse_obj_as( - type_=ProjectsListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - workspaces=workspaces, - request_options=request_options, - ) - _items = _parsed_response.results - return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -232,45 +206,25 @@ 
def create( ) client.projects.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/projects/", - method="POST", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsCreateResponse, - parse_obj_as( - type_=ProjectsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Project: """ @@ -300,24 
+254,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Project, - parse_obj_as( - type_=Project, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -349,18 +287,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -467,45 +395,26 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - 
"reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsUpdateResponse, - parse_obj_as( - type_=ProjectsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def import_tasks( self, @@ -604,41 +513,15 @@ def import_tasks( request=[{"key": "value"}], ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/import", - method="POST", - params={ - "commit_to_project": commit_to_project, - "return_task_ids": return_task_ids, - "preannotated_from_fields": preannotated_from_fields, - }, - json=request, + _response = self._raw_client.import_tasks( + id, + request=request, + commit_to_project=commit_to_project, + return_task_ids=return_task_ids, + 
preannotated_from_fields=preannotated_from_fields, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsImportTasksResponse, - parse_obj_as( - type_=ProjectsImportTasksResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate_config( self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None @@ -677,35 +560,27 @@ def validate_config( label_config="label_config", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/validate/", - method="POST", - json={ - "label_config": label_config, - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectLabelConfig, - parse_obj_as( - type_=ProjectLabelConfig, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.validate_config(id, label_config=label_config, request_options=request_options) + return _response.data class AsyncProjectsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.pauses = AsyncPausesClient(client_wrapper=self._client_wrapper) - self.exports = AsyncExportsClient(client_wrapper=self._client_wrapper) + self._raw_client = 
AsyncRawProjectsClient(client_wrapper=client_wrapper) + self.pauses = AsyncPausesClient(client_wrapper=client_wrapper) + + self.exports = AsyncExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawProjectsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawProjectsClient + """ + return self._raw_client async def list( self, @@ -773,6 +648,7 @@ async def main() -> None: response = await client.projects.list() async for item in response: yield item + # alternatively, you can paginate page-by-page async for page in response.iter_pages(): yield page @@ -780,45 +656,15 @@ async def main() -> None: asyncio.run(main()) """ - page = page if page is not None else 1 - _response = await self._client_wrapper.httpx_client.request( - "api/projects/", - method="GET", - params={ - "ordering": ordering, - "ids": ids, - "title": title, - "page": page, - "page_size": page_size, - "workspaces": workspaces, - }, + return await self._raw_client.list( + ordering=ordering, + ids=ids, + title=title, + page=page, + page_size=page_size, + workspaces=workspaces, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - ProjectsListResponse, - parse_obj_as( - type_=ProjectsListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - workspaces=workspaces, - request_options=request_options, - ) - _items = _parsed_response.results - return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -923,45 +769,25 @@ async def main() -> None: 
asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/projects/", - method="POST", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsCreateResponse, - parse_obj_as( - type_=ProjectsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Project: """ @@ -999,24 
+825,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Project, - parse_obj_as( - type_=Project, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -1056,18 +866,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1182,45 +982,26 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": 
reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsUpdateResponse, - parse_obj_as( - type_=ProjectsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def import_tasks( self, @@ -1327,41 +1108,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/import", - method="POST", - params={ - "commit_to_project": commit_to_project, - "return_task_ids": return_task_ids, - "preannotated_from_fields": preannotated_from_fields, - }, - json=request, + _response = await self._raw_client.import_tasks( + id, + request=request, + commit_to_project=commit_to_project, + return_task_ids=return_task_ids, + 
preannotated_from_fields=preannotated_from_fields, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsImportTasksResponse, - parse_obj_as( - type_=ProjectsImportTasksResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate_config( self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None @@ -1408,25 +1163,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/validate/", - method="POST", - json={ - "label_config": label_config, - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.validate_config( + id, label_config=label_config, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectLabelConfig, - parse_obj_as( - type_=ProjectLabelConfig, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/projects/exports/__init__.py b/src/label_studio_sdk/projects/exports/__init__.py index e251c825b..8366b6166 100644 --- a/src/label_studio_sdk/projects/exports/__init__.py +++ b/src/label_studio_sdk/projects/exports/__init__.py @@ -1,5 +1,7 @@ # This 
file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ExportsConvertResponse, ExportsListFormatsResponseItem __all__ = ["ExportsConvertResponse", "ExportsListFormatsResponseItem"] diff --git a/src/label_studio_sdk/projects/exports/client.py b/src/label_studio_sdk/projects/exports/client.py index 85e7ee557..1703c09ec 100644 --- a/src/label_studio_sdk/projects/exports/client.py +++ b/src/label_studio_sdk/projects/exports/client.py @@ -1,26 +1,22 @@ # This file was auto-generated by Fern from our API Definition. +import datetime as dt import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions -from ...core.jsonable_encoder import jsonable_encoder -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem -from ...core.pydantic_utilities import parse_obj_as +from ...types.annotation_filter_options import AnnotationFilterOptions +from ...types.converted_format import ConvertedFormat from ...types.export import Export -from ...types.user_simple import UserSimple -import datetime as dt +from ...types.export_format import ExportFormat +from ...types.export_snapshot import ExportSnapshot from ...types.export_snapshot_status import ExportSnapshotStatus -from ...types.converted_format import ConvertedFormat -from ...types.task_filter_options import TaskFilterOptions -from ...types.annotation_filter_options import AnnotationFilterOptions from ...types.serialization_options import SerializationOptions -from ...types.export_snapshot import ExportSnapshot -from ...core.serialization import convert_and_respect_annotation_metadata -from ...types.export_format import ExportFormat +from ...types.task_filter_options import TaskFilterOptions +from ...types.user_simple import UserSimple +from 
.raw_client import AsyncRawExportsClient, RawExportsClient from .types.exports_convert_response import ExportsConvertResponse -from ...core.client_wrapper import AsyncClientWrapper +from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -28,7 +24,18 @@ class ExportsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawExportsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawExportsClient + """ + return self._raw_client def download_sync( self, @@ -83,33 +90,20 @@ def download_sync( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Yields - ------ + Returns + ------- typing.Iterator[bytes] Exported data in binary format """ - with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(id)}/export", - method="GET", - params={ - "export_type": export_type, - "download_all_tasks": download_all_tasks, - "download_resources": download_resources, - "ids": ids, - }, + with self._raw_client.download_sync( + id, + export_type=export_type, + download_all_tasks=download_all_tasks, + download_resources=download_resources, + ids=ids, request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - for _chunk in _response.iter_bytes(chunk_size=_chunk_size): - yield _chunk - return - _response.read() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + ) as r: + yield from r.data def list_formats( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -144,24 +138,8 @@ def list_formats( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportsListFormatsResponseItem], - parse_obj_as( - type_=typing.List[ExportsListFormatsResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list_formats(id, request_options=request_options) + return _response.data def list(self, project_id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> typing.List[Export]: """ @@ -194,24 +172,8 @@ def list(self, project_id: int, *, request_options: typing.Optional[RequestOptio project_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Export], - parse_obj_as( - type_=typing.List[Export], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project_id, request_options=request_options) + return _response.data def create( self, @@ -289,49 +251,23 @@ def create( project_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="POST", - json={ - "title": title, - "id": id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=UserSimple, direction="write" - ), - "created_at": created_at, - "finished_at": finished_at, - "status": status, - "md5": md5, - "counters": counters, - "converted_formats": convert_and_respect_annotation_metadata( - object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" - ), - "task_filter_options": convert_and_respect_annotation_metadata( - object_=task_filter_options, annotation=TaskFilterOptions, direction="write" - ), - "annotation_filter_options": convert_and_respect_annotation_metadata( - object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" - ), - "serialization_options": convert_and_respect_annotation_metadata( - object_=serialization_options, annotation=SerializationOptions, direction="write" - ), - }, + _response 
= self._raw_client.create( + project_id, + title=title, + id=id, + created_by=created_by, + created_at=created_at, + finished_at=finished_at, + status=status, + md5=md5, + counters=counters, + converted_formats=converted_formats, + task_filter_options=task_filter_options, + annotation_filter_options=annotation_filter_options, + serialization_options=serialization_options, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportSnapshot, - parse_obj_as( - type_=ExportSnapshot, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def download( self, @@ -363,30 +299,15 @@ def download( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Yields - ------ + Returns + ------- typing.Iterator[bytes] Exported data in binary format """ - with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", - method="GET", - params={ - "exportType": export_type, - }, - request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - for _chunk in _response.iter_bytes(chunk_size=_chunk_size): - yield _chunk - return - _response.read() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + with self._raw_client.download( + project_id, export_pk, export_type=export_type, request_options=request_options + ) as r: + yield from r.data def get( self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None @@ -427,24 +348,8 @@ def get( export_pk="export_pk", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Export, - parse_obj_as( - type_=Export, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(project_id, export_pk, request_options=request_options) + return _response.data def delete( self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None @@ -482,18 +387,8 @@ def delete( export_pk="export_pk", ) """ - _response = 
self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(project_id, export_pk, request_options=request_options) + return _response.data def convert( self, @@ -547,37 +442,30 @@ def convert( export_pk="export_pk", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", - method="POST", - json={ - "export_type": export_type, - "download_resources": download_resources, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.convert( + project_id, + export_pk, + export_type=export_type, + download_resources=download_resources, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportsConvertResponse, - parse_obj_as( - type_=ExportsConvertResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncExportsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawExportsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawExportsClient + """ + return self._raw_client async def download_sync( self, @@ -632,33 +520,21 @@ async def download_sync( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. - Yields - ------ + Returns + ------- typing.AsyncIterator[bytes] Exported data in binary format """ - async with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(id)}/export", - method="GET", - params={ - "export_type": export_type, - "download_all_tasks": download_all_tasks, - "download_resources": download_resources, - "ids": ids, - }, + async with self._raw_client.download_sync( + id, + export_type=export_type, + download_all_tasks=download_all_tasks, + download_resources=download_resources, + ids=ids, request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size): - yield _chunk - return - await _response.aread() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + ) as r: + async for _chunk in r.data: + yield _chunk async def list_formats( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -701,24 +577,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportsListFormatsResponseItem], - parse_obj_as( - type_=typing.List[ExportsListFormatsResponseItem], # type: 
ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_formats(id, request_options=request_options) + return _response.data async def list( self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -761,24 +621,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Export], - parse_obj_as( - type_=typing.List[Export], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project_id, request_options=request_options) + return _response.data async def create( self, @@ -864,49 +708,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="POST", - json={ - "title": title, - "id": id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=UserSimple, direction="write" - ), - "created_at": created_at, - "finished_at": finished_at, - "status": status, - "md5": md5, - "counters": counters, - "converted_formats": convert_and_respect_annotation_metadata( - object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" - ), - "task_filter_options": convert_and_respect_annotation_metadata( - object_=task_filter_options, 
annotation=TaskFilterOptions, direction="write" - ), - "annotation_filter_options": convert_and_respect_annotation_metadata( - object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" - ), - "serialization_options": convert_and_respect_annotation_metadata( - object_=serialization_options, annotation=SerializationOptions, direction="write" - ), - }, + _response = await self._raw_client.create( + project_id, + title=title, + id=id, + created_by=created_by, + created_at=created_at, + finished_at=finished_at, + status=status, + md5=md5, + counters=counters, + converted_formats=converted_formats, + task_filter_options=task_filter_options, + annotation_filter_options=annotation_filter_options, + serialization_options=serialization_options, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportSnapshot, - parse_obj_as( - type_=ExportSnapshot, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def download( self, @@ -938,30 +756,16 @@ async def download( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Yields - ------ + Returns + ------- typing.AsyncIterator[bytes] Exported data in binary format """ - async with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", - method="GET", - params={ - "exportType": export_type, - }, - request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size): - yield _chunk - return - await _response.aread() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + async with self._raw_client.download( + project_id, export_pk, export_type=export_type, request_options=request_options + ) as r: + async for _chunk in r.data: + yield _chunk async def get( self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None @@ -1010,24 +814,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Export, - parse_obj_as( - type_=Export, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(project_id, export_pk, request_options=request_options) + return _response.data async def delete( self, project_id: int, export_pk: str, *, request_options: 
typing.Optional[RequestOptions] = None @@ -1073,18 +861,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(project_id, export_pk, request_options=request_options) + return _response.data async def convert( self, @@ -1146,29 +924,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", - method="POST", - json={ - "export_type": export_type, - "download_resources": download_resources, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.convert( + project_id, + export_pk, + export_type=export_type, + download_resources=download_resources, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportsConvertResponse, - parse_obj_as( - type_=ExportsConvertResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/projects/exports/raw_client.py b/src/label_studio_sdk/projects/exports/raw_client.py new file mode 100644 index 000000000..e6982638e --- /dev/null +++ b/src/label_studio_sdk/projects/exports/raw_client.py @@ -0,0 +1,1038 
@@ +# This file was auto-generated by Fern from our API Definition. + +import contextlib +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.annotation_filter_options import AnnotationFilterOptions +from ...types.converted_format import ConvertedFormat +from ...types.export import Export +from ...types.export_format import ExportFormat +from ...types.export_snapshot import ExportSnapshot +from ...types.export_snapshot_status import ExportSnapshotStatus +from ...types.serialization_options import SerializationOptions +from ...types.task_filter_options import TaskFilterOptions +from ...types.user_simple import UserSimple +from .types.exports_convert_response import ExportsConvertResponse +from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawExportsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + @contextlib.contextmanager + def download_sync( + self, + id: int, + *, + export_type: typing.Optional[str] = None, + download_all_tasks: typing.Optional[bool] = None, + download_resources: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.Iterator[HttpResponse[typing.Iterator[bytes]]]: + """ + + If you have a large project it's recommended to use export snapshots, this easy export endpoint might have timeouts. + Export annotated tasks as a file in a specific format. + For example, to export JSON annotations for a project to a file called `annotations.json`, + run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export all tasks, including skipped tasks and others without annotations, run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export specific tasks with IDs of 123 and 345, run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. 
+ + export_type : typing.Optional[str] + Selected export format (JSON by default) + + download_all_tasks : typing.Optional[bool] + + If true, download all tasks regardless of status. If false, download only annotated tasks. + + download_resources : typing.Optional[bool] + + If true, download all resource files such as images, audio, and others relevant to the tasks. + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + + Specify a list of task IDs to retrieve only the details for those tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. + + Returns + ------- + typing.Iterator[HttpResponse[typing.Iterator[bytes]]] + Exported data in binary format + """ + with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(id)}/export", + method="GET", + params={ + "export_type": export_type, + "download_all_tasks": download_all_tasks, + "download_resources": download_resources, + "ids": ids, + }, + request_options=request_options, + ) as _response: + + def _stream() -> HttpResponse[typing.Iterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return HttpResponse( + response=_response, data=(_chunk for _chunk in _response.iter_bytes(chunk_size=_chunk_size)) + ) + _response.read() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield _stream() + + def list_formats( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ExportsListFormatsResponseItem]]: + """ + + Before exporting annotations, you can check 
with formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[ExportsListFormatsResponseItem]] + Export formats + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportsListFormatsResponseItem], + parse_obj_as( + type_=typing.List[ExportsListFormatsResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[Export]]: + """ + + Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Included in the response is information about each snapshot, such as who created it and what format it is in. 
+ + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Export]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + project_id: int, + *, + title: typing.Optional[str] = OMIT, + id: typing.Optional[int] = OMIT, + created_by: typing.Optional[UserSimple] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + finished_at: typing.Optional[dt.datetime] = OMIT, + status: typing.Optional[ExportSnapshotStatus] = OMIT, + md5: typing.Optional[str] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, + task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, + annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, + serialization_options: typing.Optional[SerializationOptions] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ExportSnapshot]: + """ + + Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. + + For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + title : typing.Optional[str] + + id : typing.Optional[int] + + created_by : typing.Optional[UserSimple] + + created_at : typing.Optional[dt.datetime] + Creation time + + finished_at : typing.Optional[dt.datetime] + Complete or fail time + + status : typing.Optional[ExportSnapshotStatus] + + md5 : typing.Optional[str] + + counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] + + task_filter_options : typing.Optional[TaskFilterOptions] + + annotation_filter_options : typing.Optional[AnnotationFilterOptions] + + serialization_options : typing.Optional[SerializationOptions] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ExportSnapshot] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="POST", + json={ + "title": title, + "id": id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), + "created_at": created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, annotation=TaskFilterOptions, direction="write" + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, annotation=SerializationOptions, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportSnapshot, + parse_obj_as( + type_=ExportSnapshot, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + @contextlib.contextmanager + def download( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
typing.Iterator[HttpResponse[typing.Iterator[bytes]]]: + """ + + Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + Selected export format. JSON is available by default. For other formats, you need to convert the export first. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
+ + Returns + ------- + typing.Iterator[HttpResponse[typing.Iterator[bytes]]] + Exported data in binary format + """ + with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", + method="GET", + params={ + "exportType": export_type, + }, + request_options=request_options, + ) as _response: + + def _stream() -> HttpResponse[typing.Iterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return HttpResponse( + response=_response, data=(_chunk for _chunk in _response.iter_bytes(chunk_size=_chunk_size)) + ) + _response.read() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield _stream() + + def get( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[Export]: + """ + + Retrieve information about a specific export file (snapshot). + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Export] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete an export file by specified export ID. + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def convert( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = OMIT, + download_resources: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ExportsConvertResponse]: + """ + + You can use this to convert an export snapshot into the selected format. + + To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + + download_resources : typing.Optional[bool] + If true, download all resource files such as images, audio, and others relevant to the tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ExportsConvertResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", + method="POST", + json={ + "export_type": export_type, + "download_resources": download_resources, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportsConvertResponse, + parse_obj_as( + type_=ExportsConvertResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawExportsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + @contextlib.asynccontextmanager + async def download_sync( + self, + id: int, + *, + export_type: typing.Optional[str] = None, + download_all_tasks: typing.Optional[bool] = None, + download_resources: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]: + """ + + If you have a large project it's recommended to use export snapshots, this easy export endpoint might have timeouts. + Export annotated tasks as a file in a specific format. 
+ For example, to export JSON annotations for a project to a file called `annotations.json`, + run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export all tasks, including skipped tasks and others without annotations, run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export specific tasks with IDs of 123 and 345, run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + export_type : typing.Optional[str] + Selected export format (JSON by default) + + download_all_tasks : typing.Optional[bool] + + If true, download all tasks regardless of status. If false, download only annotated tasks. + + download_resources : typing.Optional[bool] + + If true, download all resource files such as images, audio, and others relevant to the tasks. + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + + Specify a list of task IDs to retrieve only the details for those tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
+ + Returns + ------- + typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]] + Exported data in binary format + """ + async with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(id)}/export", + method="GET", + params={ + "export_type": export_type, + "download_all_tasks": download_all_tasks, + "download_resources": download_resources, + "ids": ids, + }, + request_options=request_options, + ) as _response: + + async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return AsyncHttpResponse( + response=_response, + data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)), + ) + await _response.aread() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield await _stream() + + async def list_formats( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[ExportsListFormatsResponseItem]]: + """ + + Before exporting annotations, you can check with formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[ExportsListFormatsResponseItem]] + Export formats + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportsListFormatsResponseItem], + parse_obj_as( + type_=typing.List[ExportsListFormatsResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Export]]: + """ + + Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Included in the response is information about each snapshot, such as who created it and what format it is in. + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Export]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + project_id: int, + *, + title: typing.Optional[str] = OMIT, + id: typing.Optional[int] = OMIT, + created_by: typing.Optional[UserSimple] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + finished_at: typing.Optional[dt.datetime] = OMIT, + status: typing.Optional[ExportSnapshotStatus] = OMIT, + md5: typing.Optional[str] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, + task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, + annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, + serialization_options: typing.Optional[SerializationOptions] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ExportSnapshot]: + """ + + Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). 
+ + A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. + + For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + title : typing.Optional[str] + + id : typing.Optional[int] + + created_by : typing.Optional[UserSimple] + + created_at : typing.Optional[dt.datetime] + Creation time + + finished_at : typing.Optional[dt.datetime] + Complete or fail time + + status : typing.Optional[ExportSnapshotStatus] + + md5 : typing.Optional[str] + + counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] + + task_filter_options : typing.Optional[TaskFilterOptions] + + annotation_filter_options : typing.Optional[AnnotationFilterOptions] + + serialization_options : typing.Optional[SerializationOptions] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ExportSnapshot] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="POST", + json={ + "title": title, + "id": id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), + "created_at": created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, annotation=TaskFilterOptions, direction="write" + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, annotation=SerializationOptions, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportSnapshot, + parse_obj_as( + type_=ExportSnapshot, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + @contextlib.asynccontextmanager + async def download( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]: + """ + + Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + Selected export format. JSON is available by default. For other formats, you need to convert the export first. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
+ + Returns + ------- + typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]] + Exported data in binary format + """ + async with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", + method="GET", + params={ + "exportType": export_type, + }, + request_options=request_options, + ) as _response: + + async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return AsyncHttpResponse( + response=_response, + data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)), + ) + await _response.aread() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield await _stream() + + async def get( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Export]: + """ + + Retrieve information about a specific export file (snapshot). + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Export] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete an export file by specified export ID. + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def convert( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = OMIT, + download_resources: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ExportsConvertResponse]: + """ + + You can use this to convert an export snapshot into the selected format. + + To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + + download_resources : typing.Optional[bool] + If true, download all resource files such as images, audio, and others relevant to the tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ExportsConvertResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", + method="POST", + json={ + "export_type": export_type, + "download_resources": download_resources, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportsConvertResponse, + parse_obj_as( + type_=ExportsConvertResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/exports/types/__init__.py b/src/label_studio_sdk/projects/exports/types/__init__.py index 8ac8e7b26..45200ad5d 100644 --- a/src/label_studio_sdk/projects/exports/types/__init__.py +++ b/src/label_studio_sdk/projects/exports/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .exports_convert_response import ExportsConvertResponse from .exports_list_formats_response_item import ExportsListFormatsResponseItem diff --git a/src/label_studio_sdk/projects/exports/types/exports_convert_response.py b/src/label_studio_sdk/projects/exports/types/exports_convert_response.py index 1543ef9b9..3003e1e7a 100644 --- a/src/label_studio_sdk/projects/exports/types/exports_convert_response.py +++ b/src/label_studio_sdk/projects/exports/types/exports_convert_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing -from ....types.export_format import ExportFormat + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....types.export_format import ExportFormat class ExportsConvertResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py b/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py index 60bc39816..bbb67f01b 100644 --- a/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py +++ b/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing -from ....types.export_format import ExportFormat + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....types.export_format import ExportFormat class ExportsListFormatsResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/pauses/__init__.py b/src/label_studio_sdk/projects/pauses/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/projects/pauses/__init__.py +++ b/src/label_studio_sdk/projects/pauses/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/projects/pauses/client.py b/src/label_studio_sdk/projects/pauses/client.py index 2d9f1d318..2c1961330 100644 --- a/src/label_studio_sdk/projects/pauses/client.py +++ b/src/label_studio_sdk/projects/pauses/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.pause import Pause -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawPausesClient, RawPausesClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class PausesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawPausesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPausesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawPausesClient + """ + return self._raw_client def list( self, @@ -60,27 +68,10 @@ def list( user_pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="GET", - params={ - "include_deleted": include_deleted, - }, - request_options=request_options, + _response = self._raw_client.list( + project_pk, user_pk, include_deleted=include_deleted, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Pause], - parse_obj_as( - type_=typing.List[Pause], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create( self, @@ -127,32 +118,10 @@ def create( reason="reason", ) """ - 
_response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="POST", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + project_pk, user_pk, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -192,24 +161,8 @@ def get( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(project_pk, user_pk, id, request_options=request_options) + return _response.data def delete( self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -248,18 +201,8 @@ def delete( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - 
f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(project_pk, user_pk, id, request_options=request_options) + return _response.data def update( self, @@ -311,37 +254,26 @@ def update( reason="reason", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="PATCH", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update( + project_pk, user_pk, id, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPausesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawPausesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawPausesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawPausesClient + """ + return self._raw_client async def list( self, @@ -393,27 +325,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="GET", - params={ - "include_deleted": include_deleted, - }, - request_options=request_options, + _response = await self._raw_client.list( + project_pk, user_pk, include_deleted=include_deleted, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Pause], - parse_obj_as( - type_=typing.List[Pause], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ -468,32 +383,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="POST", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + project_pk, user_pk, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( self, 
project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -541,24 +434,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(project_pk, user_pk, id, request_options=request_options) + return _response.data async def delete( self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -605,18 +482,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(project_pk, user_pk, id, request_options=request_options) + return _response.data async def update( self, @@ -676,29 +543,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - 
method="PATCH", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update( + project_pk, user_pk, id, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/projects/pauses/raw_client.py b/src/label_studio_sdk/projects/pauses/raw_client.py new file mode 100644 index 000000000..d109e48d4 --- /dev/null +++ b/src/label_studio_sdk/projects/pauses/raw_client.py @@ -0,0 +1,543 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.pause import Pause + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawPausesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + project_pk: int, + user_pk: int, + *, + include_deleted: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[Pause]]: + """ + Return a list of pause objects for the specified project and user. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + include_deleted : typing.Optional[bool] + Include deleted pauses + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Pause]] + Successfully retrieved a list of pauses + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="GET", + params={ + "include_deleted": include_deleted, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Pause], + parse_obj_as( + type_=typing.List[Pause], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + project_pk: int, + user_pk: int, + *, + reason: str, + verbose_reason: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Pause]: + """ + Create a new pause object for the specified project and user. 
+ + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + reason : str + + verbose_reason : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Pause] + Successfully created a pause + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="POST", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[Pause]: + """ + Return detailed information about a specific pause. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + id : int + Pause ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 

        Returns
        -------
        HttpResponse[Pause]
            Successfully retrieved the pause
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}",
            method="GET",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    Pause,
                    parse_obj_as(
                        type_=Pause,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            # Non-2xx body was not JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def delete(
        self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None
    ) -> HttpResponse[None]:
        """
        Remove a pause from the database.

        Parameters
        ----------
        project_pk : int
            Project ID

        user_pk : int
            User ID

        id : int
            Pause ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[None]
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}",
            method="DELETE",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                # Successful DELETE carries no payload; only the raw response is returned.
                return HttpResponse(response=_response, data=None)
            _response_json = _response.json()
        except JSONDecodeError:
            # Non-2xx body was not JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def update(
        self,
        project_pk: int,
        user_pk: int,
        id: int,
        *,
        reason: str,
        verbose_reason: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> HttpResponse[Pause]:
        """
        Partially update one or more fields of an existing pause.

        Parameters
        ----------
        project_pk : int
            Project ID

        user_pk : int
            User ID

        id : int
            Pause ID

        reason : str

        verbose_reason : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[Pause]
            Successfully updated the pause (partial)
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}",
            method="PATCH",
            json={
                "reason": reason,
                "verbose_reason": verbose_reason,
            },
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            # NOTE(review): omit=OMIT presumably makes the client wrapper strip
            # unset optional fields from the JSON body — confirm in core client wrapper.
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    Pause,
                    parse_obj_as(
                        type_=Pause,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)


class AsyncRawPausesClient:
    """
    Asynchronous raw client for the project-member pause endpoints.

    Mirrors the synchronous client above method-for-method: each call returns
    the parsed payload wrapped in an AsyncHttpResponse; non-2xx responses
    raise ApiError with the JSON error body when parseable, raw text otherwise.
    """

    def __init__(self, *, client_wrapper: AsyncClientWrapper) -> None:
        self._client_wrapper = client_wrapper

    async def list(
        self,
        project_pk: int,
        user_pk: int,
        *,
        include_deleted: typing.Optional[bool] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> AsyncHttpResponse[typing.List[Pause]]:
        """
        Return a list of pause objects for the specified project and user.

        Parameters
        ----------
        project_pk : int
            Project ID

        user_pk : int
            User ID

        include_deleted : typing.Optional[bool]
            Include deleted pauses

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AsyncHttpResponse[typing.List[Pause]]
            Successfully retrieved a list of pauses
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses",
            method="GET",
            params={
                "include_deleted": include_deleted,
            },
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    typing.List[Pause],
                    parse_obj_as(
                        type_=typing.List[Pause],  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return AsyncHttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    async def create(
        self,
        project_pk: int,
        user_pk: int,
        *,
        reason: str,
        verbose_reason: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> AsyncHttpResponse[Pause]:
        """
        Create a new pause object for the specified project and user.

        Parameters
        ----------
        project_pk : int
            Project ID

        user_pk : int
            User ID

        reason : str

        verbose_reason : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AsyncHttpResponse[Pause]
            Successfully created a pause
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses",
            method="POST",
            json={
                "reason": reason,
                "verbose_reason": verbose_reason,
            },
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    Pause,
                    parse_obj_as(
                        type_=Pause,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return AsyncHttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    async def get(
        self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None
    ) -> AsyncHttpResponse[Pause]:
        """
        Return detailed information about a specific pause.

        Parameters
        ----------
        project_pk : int
            Project ID

        user_pk : int
            User ID

        id : int
            Pause ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AsyncHttpResponse[Pause]
            Successfully retrieved the pause
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}",
            method="GET",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    Pause,
                    parse_obj_as(
                        type_=Pause,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return AsyncHttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    async def delete(
        self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None
    ) -> AsyncHttpResponse[None]:
        """
        Remove a pause from the database.

        Parameters
        ----------
        project_pk : int
            Project ID

        user_pk : int
            User ID

        id : int
            Pause ID

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AsyncHttpResponse[None]
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}",
            method="DELETE",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                # Successful DELETE carries no payload; only the raw response is returned.
                return AsyncHttpResponse(response=_response, data=None)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    async def update(
        self,
        project_pk: int,
        user_pk: int,
        id: int,
        *,
        reason: str,
        verbose_reason: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> AsyncHttpResponse[Pause]:
        """
        Partially update one or more fields of an existing pause.

        Parameters
        ----------
        project_pk : int
            Project ID

        user_pk : int
            User ID

        id : int
            Pause ID

        reason : str

        verbose_reason : typing.Optional[str]

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AsyncHttpResponse[Pause]
            Successfully updated the pause (partial)
        """
        _response = await self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}",
            method="PATCH",
            json={
                "reason": reason,
                "verbose_reason": verbose_reason,
            },
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    Pause,
                    parse_obj_as(
                        type_=Pause,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return AsyncHttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)


# ---- NOTE(review): patch metadata removed here; a new file began at this point ----
# src/label_studio_sdk/projects/raw_client.py (new file, mode 100644)
# This file was auto-generated by Fern from our API Definition.

import typing
from json.decoder import JSONDecodeError

from ..core.api_error import ApiError
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.http_response import AsyncHttpResponse, HttpResponse
from ..core.jsonable_encoder import jsonable_encoder
from ..core.pagination import AsyncPager, BaseHttpResponse, SyncPager
from ..core.pydantic_utilities import parse_obj_as
from ..core.request_options import RequestOptions
from ..errors.bad_request_error import BadRequestError
from ..types.project import Project
from ..types.project_label_config import ProjectLabelConfig
from .types.projects_create_response import ProjectsCreateResponse
from .types.projects_import_tasks_response import ProjectsImportTasksResponse
from .types.projects_list_response import ProjectsListResponse
from .types.projects_update_response import ProjectsUpdateResponse

# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)


class RawProjectsClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper) -> None:
        self._client_wrapper = client_wrapper

    def list(
        self,
        *,
        ordering: typing.Optional[str] = None,
        ids: typing.Optional[str] = None,
        title: typing.Optional[str] = None,
        page: typing.Optional[int] = None,
        page_size: typing.Optional[int] = None,
        workspaces: typing.Optional[int] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> SyncPager[Project]:
        """

        Return a list of the projects within your organization.

        To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call.

        To retrieve a list of your Label Studio projects, update the following command to match your own environment.
        Replace the domain name, port, and authorization token, then run the following from the command line:
        ```bash
        curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123'
        ```

        Parameters
        ----------
        ordering : typing.Optional[str]
            Which field to use when ordering the results.

        ids : typing.Optional[str]
            ids

        title : typing.Optional[str]
            title

        page : typing.Optional[int]
            A page number within the paginated result set.

        page_size : typing.Optional[int]
            Number of results to return per page.

        workspaces : typing.Optional[int]
            workspaces

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        SyncPager[Project]

        """
        # Default to the first page; the pager advances by re-calling list() with page + 1.
        page = page if page is not None else 1

        _response = self._client_wrapper.httpx_client.request(
            "api/projects/",
            method="GET",
            params={
                "ordering": ordering,
                "ids": ids,
                "title": title,
                "page": page,
                "page_size": page_size,
                "workspaces": workspaces,
            },
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                _parsed_response = typing.cast(
                    ProjectsListResponse,
                    parse_obj_as(
                        type_=ProjectsListResponse,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                _items = _parsed_response.results
                # NOTE(review): has_next is hard-coded True; SyncPager presumably stops
                # when a later fetch fails or yields no items — confirm against ..core.pagination.
                _has_next = True
                _get_next = lambda: self.list(
                    ordering=ordering,
                    ids=ids,
                    title=title,
                    page=page + 1,
                    page_size=page_size,
                    workspaces=workspaces,
                    request_options=request_options,
                )
                return SyncPager(
                    has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response)
                )
            _response_json = _response.json()
        except JSONDecodeError:
            # Non-2xx body was not JSON; surface the raw text instead.
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def create(
        self,
        *,
        title: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        label_config: typing.Optional[str] = OMIT,
        expert_instruction: typing.Optional[str] = OMIT,
        show_instruction: typing.Optional[bool] = OMIT,
        show_skip_button: typing.Optional[bool] = OMIT,
        enable_empty_annotation: typing.Optional[bool] = OMIT,
        show_annotation_history: typing.Optional[bool] = OMIT,
        reveal_preannotations_interactively: typing.Optional[bool] = OMIT,
        show_collab_predictions: typing.Optional[bool] = OMIT,
        maximum_annotations: typing.Optional[int] = OMIT,
        color: typing.Optional[str] = OMIT,
        control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        workspace: typing.Optional[int] = OMIT,
        model_version: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> HttpResponse[ProjectsCreateResponse]:
        """

        Create a project and set up the labeling interface. For more information about setting up projects, see the following:
        * [Create and configure projects](https://labelstud.io/guide/setup_project)
        * [Configure labeling interface](https://labelstud.io/guide/setup)
        * [Project settings](https://labelstud.io/guide/project_settings)

        ```bash
        curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}'
        ```

        Parameters
        ----------
        title : typing.Optional[str]
            Project title

        description : typing.Optional[str]
            Project description

        label_config : typing.Optional[str]
            Label config in XML format

        expert_instruction : typing.Optional[str]
            Labeling instructions to show to the user

        show_instruction : typing.Optional[bool]
            Show labeling instructions

        show_skip_button : typing.Optional[bool]
            Show skip button

        enable_empty_annotation : typing.Optional[bool]
            Allow empty annotations

        show_annotation_history : typing.Optional[bool]
            Show annotation history

        reveal_preannotations_interactively : typing.Optional[bool]
            Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest

        show_collab_predictions : typing.Optional[bool]
            Show predictions to annotators

        maximum_annotations : typing.Optional[int]
            Maximum annotations per task

        color : typing.Optional[str]
            Project color in HEX format

        control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
            Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}}

        workspace : typing.Optional[int]
            Workspace ID

        model_version : typing.Optional[str]
            Model version

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[ProjectsCreateResponse]

        """
        _response = self._client_wrapper.httpx_client.request(
            "api/projects/",
            method="POST",
            json={
                "title": title,
                "description": description,
                "label_config": label_config,
                "expert_instruction": expert_instruction,
                "show_instruction": show_instruction,
                "show_skip_button": show_skip_button,
                "enable_empty_annotation": enable_empty_annotation,
                "show_annotation_history": show_annotation_history,
                "reveal_preannotations_interactively": reveal_preannotations_interactively,
                "show_collab_predictions": show_collab_predictions,
                "maximum_annotations": maximum_annotations,
                "color": color,
                "control_weights": control_weights,
                "workspace": workspace,
                "model_version": model_version,
            },
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    ProjectsCreateResponse,
                    parse_obj_as(
                        type_=ProjectsCreateResponse,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Project]:
        """
        Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).

        Parameters
        ----------
        id : int
            A unique integer value identifying this project.

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[Project]
            Project information
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(id)}/",
            method="GET",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    Project,
                    parse_obj_as(
                        type_=Project,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]:
        """

        Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio.

        The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).

        Parameters
        ----------
        id : int
            A unique integer value identifying this project.

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[None]
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(id)}/",
            method="DELETE",
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                # Successful DELETE carries no payload; only the raw response is returned.
                return HttpResponse(response=_response, data=None)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def update(
        self,
        id: int,
        *,
        title: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        label_config: typing.Optional[str] = OMIT,
        expert_instruction: typing.Optional[str] = OMIT,
        show_instruction: typing.Optional[bool] = OMIT,
        show_skip_button: typing.Optional[bool] = OMIT,
        enable_empty_annotation: typing.Optional[bool] = OMIT,
        show_annotation_history: typing.Optional[bool] = OMIT,
        reveal_preannotations_interactively: typing.Optional[bool] = OMIT,
        show_collab_predictions: typing.Optional[bool] = OMIT,
        maximum_annotations: typing.Optional[int] = OMIT,
        color: typing.Optional[str] = OMIT,
        control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        workspace: typing.Optional[int] = OMIT,
        model_version: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> HttpResponse[ProjectsUpdateResponse]:
        """

        Update the project settings for a specific project. For more information, see the following:
        * [Create and configure projects](https://labelstud.io/guide/setup_project)
        * [Configure labeling interface](https://labelstud.io/guide/setup)
        * [Project settings](https://labelstud.io/guide/project_settings)

        The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).


        If you are modifying the labeling config for project that has in-progress work, note the following:
        * You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels.
        * If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed.


        Parameters
        ----------
        id : int
            A unique integer value identifying this project.

        title : typing.Optional[str]
            Project title

        description : typing.Optional[str]
            Project description

        label_config : typing.Optional[str]
            Label config in XML format

        expert_instruction : typing.Optional[str]
            Labeling instructions to show to the user

        show_instruction : typing.Optional[bool]
            Show labeling instructions

        show_skip_button : typing.Optional[bool]
            Show skip button

        enable_empty_annotation : typing.Optional[bool]
            Allow empty annotations

        show_annotation_history : typing.Optional[bool]
            Show annotation history

        reveal_preannotations_interactively : typing.Optional[bool]
            Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest

        show_collab_predictions : typing.Optional[bool]
            Show predictions to annotators

        maximum_annotations : typing.Optional[int]
            Maximum annotations per task

        color : typing.Optional[str]
            Project color in HEX format

        control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
            Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}}

        workspace : typing.Optional[int]
            Workspace ID

        model_version : typing.Optional[str]
            Model version

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[ProjectsUpdateResponse]

        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(id)}/",
            method="PATCH",
            json={
                "title": title,
                "description": description,
                "label_config": label_config,
                "expert_instruction": expert_instruction,
                "show_instruction": show_instruction,
                "show_skip_button": show_skip_button,
                "enable_empty_annotation": enable_empty_annotation,
                "show_annotation_history": show_annotation_history,
                "reveal_preannotations_interactively": reveal_preannotations_interactively,
                "show_collab_predictions": show_collab_predictions,
                "maximum_annotations": maximum_annotations,
                "color": color,
                "control_weights": control_weights,
                "workspace": workspace,
                "model_version": model_version,
            },
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    ProjectsUpdateResponse,
                    parse_obj_as(
                        type_=ProjectsUpdateResponse,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def import_tasks(
        self,
        id: int,
        *,
        request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]],
        commit_to_project: typing.Optional[bool] = None,
        return_task_ids: typing.Optional[bool] = None,
        preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> HttpResponse[ProjectsImportTasksResponse]:
        """

        Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited at 250K tasks and 200 MB.
        The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list).


        Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*.

        For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field.


        There are three possible ways to import tasks with this endpoint:

        #### 1\. **POST with data**
        Send JSON tasks as POST data. Only JSON is supported for POSTing files directly.

        Update this example to specify your authorization token and Label Studio instance host, then run the following from
        the command line:

        ```bash
        curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \
        -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]'
        ```

        #### 2\. **POST with files**
        Send tasks as files. You can attach multiple files with different names.

        - **JSON**: text files in JavaScript object notation format
        - **CSV**: text files with tables in Comma Separated Values format
        - **TSV**: text files with tables in Tab Separated Value format
        - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only

        Update this example to specify your authorization token, Label Studio instance host, and file name and path,
        then run the following from the command line:

        ```bash
        curl -H 'Authorization: Token abc123' \
        -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv'
        ```

        #### 3\. **POST with URL**
        You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2.

        ```bash
        curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \
        -X POST 'https://localhost:8080/api/projects/1/import' \
        --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]'
        ```



        Parameters
        ----------
        id : int
            A unique integer value identifying this project.

        request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]

        commit_to_project : typing.Optional[bool]
            Set to "true" to immediately commit tasks to the project.

        return_task_ids : typing.Optional[bool]
            Set to "true" to return task IDs in the response.

        preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]]
            List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannotated_from_fields=["prediction"]`.

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[ProjectsImportTasksResponse]
            Tasks successfully imported
        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(id)}/import",
            method="POST",
            params={
                "commit_to_project": commit_to_project,
                "return_task_ids": return_task_ids,
                "preannotated_from_fields": preannotated_from_fields,
            },
            json=request,
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    ProjectsImportTasksResponse,
                    parse_obj_as(
                        type_=ProjectsImportTasksResponse,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            # 400 responses surface as a typed BadRequestError with the parsed JSON body.
            if _response.status_code == 400:
                raise BadRequestError(
                    headers=dict(_response.headers),
                    body=typing.cast(
                        typing.Optional[typing.Any],
                        parse_obj_as(
                            type_=typing.Optional[typing.Any],  # type: ignore
                            object_=_response.json(),
                        ),
                    ),
                )
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    def validate_config(
        self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None
    ) -> HttpResponse[ProjectLabelConfig]:
        """

        Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/).

        The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list).

        Parameters
        ----------
        id : int
            A unique integer value identifying this project.

        label_config : str
            Label config in XML format. See more about it in documentation

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[ProjectLabelConfig]

        """
        _response = self._client_wrapper.httpx_client.request(
            f"api/projects/{jsonable_encoder(id)}/validate/",
            method="POST",
            json={
                "label_config": label_config,
            },
            headers={
                "content-type": "application/json",
            },
            request_options=request_options,
            omit=OMIT,
        )
        try:
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    ProjectLabelConfig,
                    parse_obj_as(
                        type_=ProjectLabelConfig,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)


class AsyncRawProjectsClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper) -> None:
        self._client_wrapper = client_wrapper

    async def list(
        self,
        *,
        ordering: typing.Optional[str] = None,
        ids: typing.Optional[str] = None,
        title: typing.Optional[str] = None,
        page: typing.Optional[int] = None,
        page_size: typing.Optional[int] = None,
        workspaces: typing.Optional[int] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> AsyncPager[Project]:
        """

        Return a list of the projects within your organization.

        To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call.

        To retrieve a list of your Label Studio projects, update the following command to match your own environment.
        Replace the domain name, port, and authorization token, then run the following from the command line:
        ```bash
        curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123'
        ```

        Parameters
        ----------
        ordering : typing.Optional[str]
            Which field to use when ordering the results.

        ids : typing.Optional[str]
            ids

        title : typing.Optional[str]
            title

        page : typing.Optional[int]
            A page number within the paginated result set.

        page_size : typing.Optional[int]
            Number of results to return per page.

        workspaces : typing.Optional[int]
            workspaces

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AsyncPager[Project]

        """
        # Default to the first page; the pager advances by re-calling list() with page + 1.
        page = page if page is not None else 1

        _response = await self._client_wrapper.httpx_client.request(
            "api/projects/",
            method="GET",
            params={
                "ordering": ordering,
                "ids": ids,
                "title": title,
                "page": page,
                "page_size": page_size,
                "workspaces": workspaces,
            },
            request_options=request_options,
        )
        try:
            if 200 <= _response.status_code < 300:
                _parsed_response = typing.cast(
                    ProjectsListResponse,
                    parse_obj_as(
                        type_=ProjectsListResponse,  # type: ignore
                        object_=_response.json(),
                    ),
                )
                _items = _parsed_response.results
                # NOTE(review): has_next is hard-coded True; AsyncPager presumably stops
                # when a later fetch fails or yields no items — confirm against ..core.pagination.
                _has_next = True

                async def _get_next():
                    return await self.list(
                        ordering=ordering,
                        ids=ids,
                        title=title,
                        page=page + 1,
                        page_size=page_size,
                        workspaces=workspaces,
                        request_options=request_options,
                    )

                return AsyncPager(
                    has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response)
                )
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

    async def create(
        self,
        *,
        title: typing.Optional[str] =
OMIT, + description: typing.Optional[str] = OMIT, + label_config: typing.Optional[str] = OMIT, + expert_instruction: typing.Optional[str] = OMIT, + show_instruction: typing.Optional[bool] = OMIT, + show_skip_button: typing.Optional[bool] = OMIT, + enable_empty_annotation: typing.Optional[bool] = OMIT, + show_annotation_history: typing.Optional[bool] = OMIT, + reveal_preannotations_interactively: typing.Optional[bool] = OMIT, + show_collab_predictions: typing.Optional[bool] = OMIT, + maximum_annotations: typing.Optional[int] = OMIT, + color: typing.Optional[str] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + workspace: typing.Optional[int] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ProjectsCreateResponse]: + """ + + Create a project and set up the labeling interface. For more information about setting up projects, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) + + ```bash + curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' + ``` + + Parameters + ---------- + title : typing.Optional[str] + Project title + + description : typing.Optional[str] + Project description + + label_config : typing.Optional[str] + Label config in XML format + + expert_instruction : typing.Optional[str] + Labeling instructions to show to the user + + show_instruction : typing.Optional[bool] + Show labeling instructions + + show_skip_button : typing.Optional[bool] + Show skip button + + enable_empty_annotation : typing.Optional[bool] + Allow empty annotations + + show_annotation_history : typing.Optional[bool] + Show annotation history + + 
reveal_preannotations_interactively : typing.Optional[bool] + Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + + show_collab_predictions : typing.Optional[bool] + Show predictions to annotators + + maximum_annotations : typing.Optional[int] + Maximum annotations per task + + color : typing.Optional[str] + Project color in HEX format + + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + + workspace : typing.Optional[int] + Workspace ID + + model_version : typing.Optional[str] + Model version + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectsCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/projects/", + method="POST", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsCreateResponse, + parse_obj_as( + type_=ProjectsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Project]: + """ + Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Project] + Project information + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + label_config: typing.Optional[str] = OMIT, + expert_instruction: typing.Optional[str] = OMIT, + show_instruction: typing.Optional[bool] = OMIT, + show_skip_button: typing.Optional[bool] = OMIT, + enable_empty_annotation: typing.Optional[bool] = OMIT, + show_annotation_history: typing.Optional[bool] = OMIT, + reveal_preannotations_interactively: typing.Optional[bool] = OMIT, + show_collab_predictions: typing.Optional[bool] = OMIT, + maximum_annotations: typing.Optional[int] = OMIT, + color: typing.Optional[str] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + workspace: typing.Optional[int] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ProjectsUpdateResponse]: + """ + + Update the project settings for a specific project. 
For more information, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + + If you are modifying the labeling config for project that has in-progress work, note the following: + * You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. + * If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. + + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + title : typing.Optional[str] + Project title + + description : typing.Optional[str] + Project description + + label_config : typing.Optional[str] + Label config in XML format + + expert_instruction : typing.Optional[str] + Labeling instructions to show to the user + + show_instruction : typing.Optional[bool] + Show labeling instructions + + show_skip_button : typing.Optional[bool] + Show skip button + + enable_empty_annotation : typing.Optional[bool] + Allow empty annotations + + show_annotation_history : typing.Optional[bool] + Show annotation history + + reveal_preannotations_interactively : typing.Optional[bool] + Reveal preannotations interactively. 
If set to True, predictions will be shown to the user only after selecting the area of interest + + show_collab_predictions : typing.Optional[bool] + Show predictions to annotators + + maximum_annotations : typing.Optional[int] + Maximum annotations per task + + color : typing.Optional[str] + Project color in HEX format + + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + + workspace : typing.Optional[int] + Workspace ID + + model_version : typing.Optional[str] + Model version + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectsUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsUpdateResponse, + parse_obj_as( + type_=ProjectsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def import_tasks( + self, + id: int, + *, + request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]], + commit_to_project: typing.Optional[bool] = None, + return_task_ids: typing.Optional[bool] = None, + preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ProjectsImportTasksResponse]: + """ + + Use this API endpoint to import labeling tasks in bulk. 
Note that each POST request is limited at 250K tasks and 200 MB. + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + + Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + + For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. + + + There are three possible ways to import tasks with this endpoint: + + #### 1\. **POST with data** + Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. + + Update this example to specify your authorization token and Label Studio instance host, then run the following from + the command line: + + ```bash + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ + -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' + ``` + + #### 2\. **POST with files** + Send tasks as files. You can attach multiple files with different names. + + - **JSON**: text files in JavaScript object notation format + - **CSV**: text files with tables in Comma Separated Values format + - **TSV**: text files with tables in Tab Separated Value format + - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only + + Update this example to specify your authorization token, Label Studio instance host, and file name and path, + then run the following from the command line: + + ```bash + curl -H 'Authorization: Token abc123' \ + -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' + ``` + + #### 3\. **POST with URL** + You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. 
+ + ```bash + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ + -X POST 'https://localhost:8080/api/projects/1/import' \ + --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' + ``` + +
+ + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] + + commit_to_project : typing.Optional[bool] + Set to "true" to immediately commit tasks to the project. + + return_task_ids : typing.Optional[bool] + Set to "true" to return task IDs in the response. + + preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannotated_from_fields=["prediction"]`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ProjectsImportTasksResponse] + Tasks successfully imported + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/import", + method="POST", + params={ + "commit_to_project": commit_to_project, + "return_task_ids": return_task_ids, + "preannotated_from_fields": preannotated_from_fields, + }, + json=request, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsImportTasksResponse, + parse_obj_as( + type_=ProjectsImportTasksResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate_config( + self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ProjectLabelConfig]: + """ + + Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + label_config : str + Label config in XML format. See more about it in documentation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectLabelConfig] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/validate/", + method="POST", + json={ + "label_config": label_config, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectLabelConfig, + parse_obj_as( + type_=ProjectLabelConfig, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/types/__init__.py b/src/label_studio_sdk/projects/types/__init__.py index 8f2e80d13..71e3306e4 100644 --- a/src/label_studio_sdk/projects/types/__init__.py +++ b/src/label_studio_sdk/projects/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .projects_create_response import ProjectsCreateResponse from .projects_import_tasks_response import ProjectsImportTasksResponse from .projects_list_response import ProjectsListResponse diff --git a/src/label_studio_sdk/projects/types/projects_create_response.py b/src/label_studio_sdk/projects/types/projects_create_response.py index b56d864a8..f8f6a555c 100644 --- a/src/label_studio_sdk/projects/types/projects_create_response.py +++ b/src/label_studio_sdk/projects/types/projects_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ProjectsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py index 66adc2d99..24f31d790 100644 --- a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py +++ b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ProjectsImportTasksResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_list_response.py b/src/label_studio_sdk/projects/types/projects_list_response.py index 6d16d6b32..98a5363a7 100644 --- a/src/label_studio_sdk/projects/types/projects_list_response.py +++ b/src/label_studio_sdk/projects/types/projects_list_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...types.project import Project -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...types.project import Project class ProjectsListResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_update_response.py b/src/label_studio_sdk/projects/types/projects_update_response.py index 5034b9c8b..ae66d5688 100644 --- a/src/label_studio_sdk/projects/types/projects_update_response.py +++ b/src/label_studio_sdk/projects/types/projects_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ProjectsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/__init__.py b/src/label_studio_sdk/prompts/__init__.py index a9ec1fd8f..7104d2f12 100644 --- a/src/label_studio_sdk/prompts/__init__.py +++ b/src/label_studio_sdk/prompts/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, PromptsBatchFailedPredictionsResponse, diff --git a/src/label_studio_sdk/prompts/client.py b/src/label_studio_sdk/prompts/client.py index a48c2da4f..54cafd816 100644 --- a/src/label_studio_sdk/prompts/client.py +++ b/src/label_studio_sdk/prompts/client.py @@ -1,31 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ..core.client_wrapper import SyncClientWrapper -from .versions.client import VersionsClient -from .runs.client import RunsClient -from .indicators.client import IndicatorsClient + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.prompt import Prompt -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem from ..types.prompt_created_by import PromptCreatedBy -import datetime as dt from ..types.prompt_organization import PromptOrganization -from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.jsonable_encoder import jsonable_encoder -from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem -from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse +from .indicators.client import AsyncIndicatorsClient, IndicatorsClient +from .raw_client import AsyncRawPromptsClient, RawPromptsClient +from .runs.client import AsyncRunsClient, RunsClient from .types.prompts_batch_failed_predictions_request_failed_predictions_item import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, ) from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse -from ..core.client_wrapper import AsyncClientWrapper -from .versions.client import AsyncVersionsClient -from .runs.client import AsyncRunsClient -from .indicators.client import AsyncIndicatorsClient +from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem +from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse +from .versions.client import 
AsyncVersionsClient, VersionsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -33,10 +26,23 @@ class PromptsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.versions = VersionsClient(client_wrapper=self._client_wrapper) - self.runs = RunsClient(client_wrapper=self._client_wrapper) - self.indicators = IndicatorsClient(client_wrapper=self._client_wrapper) + self._raw_client = RawPromptsClient(client_wrapper=client_wrapper) + self.versions = VersionsClient(client_wrapper=client_wrapper) + + self.runs = RunsClient(client_wrapper=client_wrapper) + + self.indicators = IndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPromptsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawPromptsClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: """ @@ -61,24 +67,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.prompts.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/prompts/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prompt], - parse_obj_as( - type_=typing.List[Prompt], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -151,45 +141,20 @@ def create( output_classes=["output_classes"], ) """ - _response = self._client_wrapper.httpx_client.request( - "api/prompts/", - method="POST", - 
json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = self._raw_client.create( + title=title, + input_fields=input_fields, + output_classes=output_classes, + description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt: """ @@ -219,24 +184,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -265,18 +214,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -354,45 +293,21 @@ def update( output_classes=["output_classes"], ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = self._raw_client.update( + id, + title=title, + input_fields=input_fields, + output_classes=output_classes, + 
description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def batch_predictions( self, @@ -428,36 +343,10 @@ def batch_predictions( ) client.prompts.batch_predictions() """ - _response = self._client_wrapper.httpx_client.request( - "api/model-run/batch-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "results": convert_and_respect_annotation_metadata( - object_=results, - annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.batch_predictions( + modelrun_id=modelrun_id, results=results, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchPredictionsResponse, - parse_obj_as( - type_=PromptsBatchPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def batch_failed_predictions( self, @@ -495,44 +384,31 @@ def batch_failed_predictions( ) client.prompts.batch_failed_predictions() """ - _response = self._client_wrapper.httpx_client.request( - "api/model-run/batch-failed-predictions", - 
method="POST", - json={ - "modelrun_id": modelrun_id, - "failed_predictions": convert_and_respect_annotation_metadata( - object_=failed_predictions, - annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.batch_failed_predictions( + modelrun_id=modelrun_id, failed_predictions=failed_predictions, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchFailedPredictionsResponse, - parse_obj_as( - type_=PromptsBatchFailedPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPromptsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.versions = AsyncVersionsClient(client_wrapper=self._client_wrapper) - self.runs = AsyncRunsClient(client_wrapper=self._client_wrapper) - self.indicators = AsyncIndicatorsClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawPromptsClient(client_wrapper=client_wrapper) + self.versions = AsyncVersionsClient(client_wrapper=client_wrapper) + + self.runs = AsyncRunsClient(client_wrapper=client_wrapper) + + self.indicators = AsyncIndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawPromptsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawPromptsClient + """ + return self._raw_client async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: """ @@ -565,24 +441,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/prompts/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prompt], - parse_obj_as( - type_=typing.List[Prompt], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -663,45 +523,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/prompts/", - method="POST", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = await self._raw_client.create( + title=title, + input_fields=input_fields, + output_classes=output_classes, + description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + 
organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt: """ @@ -739,24 +574,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -793,18 +612,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, 
request_options=request_options) + return _response.data async def update( self, @@ -890,45 +699,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = await self._raw_client.update( + id, + title=title, + input_fields=input_fields, + output_classes=output_classes, + description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def batch_predictions( self, @@ -972,36 +757,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-run/batch-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "results": 
convert_and_respect_annotation_metadata( - object_=results, - annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.batch_predictions( + modelrun_id=modelrun_id, results=results, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchPredictionsResponse, - parse_obj_as( - type_=PromptsBatchPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def batch_failed_predictions( self, @@ -1047,33 +806,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-run/batch-failed-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "failed_predictions": convert_and_respect_annotation_metadata( - object_=failed_predictions, - annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.batch_failed_predictions( + modelrun_id=modelrun_id, failed_predictions=failed_predictions, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchFailedPredictionsResponse, - parse_obj_as( - type_=PromptsBatchFailedPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/prompts/indicators/__init__.py b/src/label_studio_sdk/prompts/indicators/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/prompts/indicators/__init__.py +++ b/src/label_studio_sdk/prompts/indicators/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/prompts/indicators/client.py b/src/label_studio_sdk/prompts/indicators/client.py index 684dbe469..b3ec7bc01 100644 --- a/src/label_studio_sdk/prompts/indicators/client.py +++ b/src/label_studio_sdk/prompts/indicators/client.py @@ -1,20 +1,28 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.client_wrapper import SyncClientWrapper import typing + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions -from ...types.key_indicators import KeyIndicators -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...types.key_indicator_value import KeyIndicatorValue -from ...core.client_wrapper import AsyncClientWrapper +from ...types.key_indicators import KeyIndicators +from .raw_client import AsyncRawIndicatorsClient, RawIndicatorsClient class IndicatorsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawIndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawIndicatorsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawIndicatorsClient + """ + return self._raw_client def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> KeyIndicators: """ @@ -44,24 +52,8 @@ def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = No pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicators, - parse_obj_as( - type_=KeyIndicators, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(pk, request_options=request_options) + return _response.data def get( self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None @@ -97,29 +89,24 @@ def get( pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicatorValue, - parse_obj_as( - type_=KeyIndicatorValue, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(indicator_key, pk, request_options=request_options) + return _response.data class AsyncIndicatorsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = 
AsyncRawIndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawIndicatorsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawIndicatorsClient + """ + return self._raw_client async def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> KeyIndicators: """ @@ -157,24 +144,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicators, - parse_obj_as( - type_=KeyIndicators, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(pk, request_options=request_options) + return _response.data async def get( self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None @@ -218,21 +189,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicatorValue, - parse_obj_as( - type_=KeyIndicatorValue, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(indicator_key, pk, 
request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/prompts/indicators/raw_client.py b/src/label_studio_sdk/prompts/indicators/raw_client.py new file mode 100644 index 000000000..43bdb6b8e --- /dev/null +++ b/src/label_studio_sdk/prompts/indicators/raw_client.py @@ -0,0 +1,183 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.key_indicator_value import KeyIndicatorValue +from ...types.key_indicators import KeyIndicators + + +class RawIndicatorsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[KeyIndicators]: + """ + Get key indicators for the Prompt dashboard. + + Parameters + ---------- + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[KeyIndicators] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[KeyIndicatorValue]: + """ + Get a specific key indicator for the Prompt dashboard. + + Parameters + ---------- + indicator_key : str + Key of the indicator + + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[KeyIndicatorValue] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawIndicatorsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[KeyIndicators]: + """ + Get key indicators for the Prompt dashboard. + + Parameters + ---------- + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[KeyIndicators] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[KeyIndicatorValue]: + """ + Get a specific key indicator for the Prompt dashboard. + + Parameters + ---------- + indicator_key : str + Key of the indicator + + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[KeyIndicatorValue] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/prompts/raw_client.py b/src/label_studio_sdk/prompts/raw_client.py new file mode 100644 index 000000000..371eb1317 --- /dev/null +++ b/src/label_studio_sdk/prompts/raw_client.py @@ -0,0 +1,890 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.prompt import Prompt +from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem +from ..types.prompt_created_by import PromptCreatedBy +from ..types.prompt_organization import PromptOrganization +from .types.prompts_batch_failed_predictions_request_failed_predictions_item import ( + PromptsBatchFailedPredictionsRequestFailedPredictionsItem, +) +from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse +from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem +from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawPromptsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[Prompt]]: + """ + Get a list of prompts. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[Prompt]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prompt]: + """ + Create a new prompt. 
+ + Parameters + ---------- + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prompt] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return 
HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Prompt]: + """ + Get a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prompt] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prompt]: + """ + Update a prompt by ID. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prompt] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + 
object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def batch_predictions( + self, + *, + modelrun_id: typing.Optional[int] = OMIT, + results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptsBatchPredictionsResponse]: + """ + Create a new batch prediction. + + Parameters + ---------- + modelrun_id : typing.Optional[int] + Model Run ID to associate the prediction with + + results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PromptsBatchPredictionsResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "results": convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) + + def batch_failed_predictions( + self, + *, + modelrun_id: typing.Optional[int] = OMIT, + failed_predictions: typing.Optional[ + typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] + ] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptsBatchFailedPredictionsResponse]: + """ + Create a new batch of failed predictions. + + Parameters + ---------- + modelrun_id : typing.Optional[int] + Model Run ID where the failed predictions came from + + failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PromptsBatchFailedPredictionsResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-run/batch-failed-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawPromptsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + 
self._client_wrapper = client_wrapper + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Prompt]]: + """ + Get a list of prompts. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[Prompt]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prompt]: + """ + Create a new prompt. 
+ + Parameters + ---------- + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return 
AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Prompt]: + """ + Get a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prompt]: + """ + Update a prompt by ID. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + 
object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def batch_predictions( + self, + *, + modelrun_id: typing.Optional[int] = OMIT, + results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptsBatchPredictionsResponse]: + """ + Create a new batch prediction. + + Parameters + ---------- + modelrun_id : typing.Optional[int] + Model Run ID to associate the prediction with + + results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptsBatchPredictionsResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "results": convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise 
ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def batch_failed_predictions( + self, + *, + modelrun_id: typing.Optional[int] = OMIT, + failed_predictions: typing.Optional[ + typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] + ] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptsBatchFailedPredictionsResponse]: + """ + Create a new batch of failed predictions. + + Parameters + ---------- + modelrun_id : typing.Optional[int] + Model Run ID where the failed predictions came from + + failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptsBatchFailedPredictionsResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-run/batch-failed-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git 
a/src/label_studio_sdk/prompts/runs/__init__.py b/src/label_studio_sdk/prompts/runs/__init__.py index b92e5ed7e..549df0e78 100644 --- a/src/label_studio_sdk/prompts/runs/__init__.py +++ b/src/label_studio_sdk/prompts/runs/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import RunsListRequestProjectSubset __all__ = ["RunsListRequestProjectSubset"] diff --git a/src/label_studio_sdk/prompts/runs/client.py b/src/label_studio_sdk/prompts/runs/client.py index 7de7b025e..4adb7cf23 100644 --- a/src/label_studio_sdk/prompts/runs/client.py +++ b/src/label_studio_sdk/prompts/runs/client.py @@ -1,21 +1,17 @@ # This file was auto-generated by Fern from our API Definition. +import datetime as dt import typing -from ...core.client_wrapper import SyncClientWrapper -from .types.runs_list_request_project_subset import RunsListRequestProjectSubset + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.inference_run import InferenceRun -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...types.inference_run_project_subset import InferenceRunProjectSubset -from ...types.inference_run_organization import InferenceRunOrganization from ...types.inference_run_created_by import InferenceRunCreatedBy +from ...types.inference_run_organization import InferenceRunOrganization +from ...types.inference_run_project_subset import InferenceRunProjectSubset from ...types.inference_run_status import InferenceRunStatus -import datetime as dt -from ...core.serialization import convert_and_respect_annotation_metadata -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawRunsClient, RawRunsClient +from .types.runs_list_request_project_subset import 
RunsListRequestProjectSubset # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,7 +19,18 @@ class RunsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawRunsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawRunsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawRunsClient + """ + return self._raw_client def list( self, @@ -73,28 +80,10 @@ def list( project_subset="All", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="GET", - params={ - "project": project, - "project_subset": project_subset, - }, - request_options=request_options, + _response = self._raw_client.list( + id, version_id, project=project, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create( self, @@ -169,47 +158,39 @@ def create( project_subset="All", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="POST", - json={ - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=InferenceRunOrganization, direction="write" - ), - "project": project, - "model_version": model_version, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, 
annotation=InferenceRunCreatedBy, direction="write" - ), - "project_subset": project_subset, - "status": status, - "job_id": job_id, - "created_at": created_at, - "triggered_at": triggered_at, - "predictions_updated_at": predictions_updated_at, - "completed_at": completed_at, - }, + _response = self._raw_client.create( + id, + version_id, + project=project, + project_subset=project_subset, + organization=organization, + model_version=model_version, + created_by=created_by, + status=status, + job_id=job_id, + created_at=created_at, + triggered_at=triggered_at, + predictions_updated_at=predictions_updated_at, + completed_at=completed_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncRunsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawRunsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawRunsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawRunsClient + """ + return self._raw_client async def list( self, @@ -267,28 +248,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="GET", - params={ - "project": project, - "project_subset": project_subset, - }, - request_options=request_options, + _response = await self._raw_client.list( + id, version_id, project=project, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ -371,39 +334,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="POST", - json={ - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=InferenceRunOrganization, direction="write" - ), - "project": project, - "model_version": model_version, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=InferenceRunCreatedBy, direction="write" - ), - "project_subset": project_subset, - "status": status, - "job_id": job_id, - "created_at": created_at, - "triggered_at": triggered_at, - "predictions_updated_at": predictions_updated_at, - "completed_at": completed_at, - }, + _response = await self._raw_client.create( + id, + version_id, + project=project, + project_subset=project_subset, + organization=organization, + 
model_version=model_version, + created_by=created_by, + status=status, + job_id=job_id, + created_at=created_at, + triggered_at=triggered_at, + predictions_updated_at=predictions_updated_at, + completed_at=completed_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/prompts/runs/raw_client.py b/src/label_studio_sdk/prompts/runs/raw_client.py new file mode 100644 index 000000000..e2c694d33 --- /dev/null +++ b/src/label_studio_sdk/prompts/runs/raw_client.py @@ -0,0 +1,348 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.inference_run import InferenceRun +from ...types.inference_run_created_by import InferenceRunCreatedBy +from ...types.inference_run_organization import InferenceRunOrganization +from ...types.inference_run_project_subset import InferenceRunProjectSubset +from ...types.inference_run_status import InferenceRunStatus +from .types.runs_list_request_project_subset import RunsListRequestProjectSubset + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawRunsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: RunsListRequestProjectSubset, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[InferenceRun]: + """ + Get information (status, metadata, etc) about an existing inference run + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + The ID of the project that this Inference Run makes predictions on + + project_subset : RunsListRequestProjectSubset + Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[InferenceRun] + Success + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="GET", + params={ + "project": project, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: InferenceRunProjectSubset, + organization: typing.Optional[InferenceRunOrganization] = OMIT, + model_version: typing.Optional[int] = OMIT, + created_by: 
typing.Optional[InferenceRunCreatedBy] = OMIT, + status: typing.Optional[InferenceRunStatus] = OMIT, + job_id: typing.Optional[str] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + triggered_at: typing.Optional[dt.datetime] = OMIT, + predictions_updated_at: typing.Optional[dt.datetime] = OMIT, + completed_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[InferenceRun]: + """ + Run a prompt inference. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + + project_subset : InferenceRunProjectSubset + + organization : typing.Optional[InferenceRunOrganization] + + model_version : typing.Optional[int] + + created_by : typing.Optional[InferenceRunCreatedBy] + + status : typing.Optional[InferenceRunStatus] + + job_id : typing.Optional[str] + + created_at : typing.Optional[dt.datetime] + + triggered_at : typing.Optional[dt.datetime] + + predictions_updated_at : typing.Optional[dt.datetime] + + completed_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[InferenceRun] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=InferenceRunOrganization, direction="write" + ), + "project": project, + "model_version": model_version, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=InferenceRunCreatedBy, direction="write" + ), + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawRunsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: RunsListRequestProjectSubset, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[InferenceRun]: + """ + Get information (status, metadata, etc) about an existing inference run + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + The ID of the project that this 
Inference Run makes predictions on + + project_subset : RunsListRequestProjectSubset + Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[InferenceRun] + Success + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="GET", + params={ + "project": project, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: InferenceRunProjectSubset, + organization: typing.Optional[InferenceRunOrganization] = OMIT, + model_version: typing.Optional[int] = OMIT, + created_by: typing.Optional[InferenceRunCreatedBy] = OMIT, + status: typing.Optional[InferenceRunStatus] = OMIT, + job_id: typing.Optional[str] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + triggered_at: typing.Optional[dt.datetime] = OMIT, + predictions_updated_at: typing.Optional[dt.datetime] = OMIT, + completed_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[InferenceRun]: + """ + Run a prompt inference. 
+ + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + + project_subset : InferenceRunProjectSubset + + organization : typing.Optional[InferenceRunOrganization] + + model_version : typing.Optional[int] + + created_by : typing.Optional[InferenceRunCreatedBy] + + status : typing.Optional[InferenceRunStatus] + + job_id : typing.Optional[str] + + created_at : typing.Optional[dt.datetime] + + triggered_at : typing.Optional[dt.datetime] + + predictions_updated_at : typing.Optional[dt.datetime] + + completed_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[InferenceRun] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=InferenceRunOrganization, direction="write" + ), + "project": project, + "model_version": model_version, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=InferenceRunCreatedBy, direction="write" + ), + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), 
body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/prompts/runs/types/__init__.py b/src/label_studio_sdk/prompts/runs/types/__init__.py index 81dbca787..8b1f7f214 100644 --- a/src/label_studio_sdk/prompts/runs/types/__init__.py +++ b/src/label_studio_sdk/prompts/runs/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .runs_list_request_project_subset import RunsListRequestProjectSubset __all__ = ["RunsListRequestProjectSubset"] diff --git a/src/label_studio_sdk/prompts/types/__init__.py b/src/label_studio_sdk/prompts/types/__init__.py index aa63b5ae5..cd753f61e 100644 --- a/src/label_studio_sdk/prompts/types/__init__.py +++ b/src/label_studio_sdk/prompts/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .prompts_batch_failed_predictions_request_failed_predictions_item import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, ) diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py index f58cf15b8..87c9cbf41 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchFailedPredictionsRequestFailedPredictionsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py index 210085456..ad4d5a758 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchFailedPredictionsResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py index d46f78c4d..43c268c94 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchPredictionsRequestResultsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py index befabdace..30463c2be 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchPredictionsResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/versions/__init__.py b/src/label_studio_sdk/prompts/versions/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/prompts/versions/__init__.py +++ b/src/label_studio_sdk/prompts/versions/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/prompts/versions/client.py b/src/label_studio_sdk/prompts/versions/client.py index 6c17c2424..7baf50d7c 100644 --- a/src/label_studio_sdk/prompts/versions/client.py +++ b/src/label_studio_sdk/prompts/versions/client.py @@ -1,21 +1,17 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions +from ...types.inference_run_cost_estimate import InferenceRunCostEstimate from ...types.prompt_version import PromptVersion -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...types.prompt_version_provider import PromptVersionProvider from ...types.prompt_version_created_by import PromptVersionCreatedBy -import datetime as dt from ...types.prompt_version_organization import PromptVersionOrganization -from ...core.serialization import convert_and_respect_annotation_metadata -from ...types.inference_run_cost_estimate import InferenceRunCostEstimate +from ...types.prompt_version_provider import PromptVersionProvider from ...types.refined_prompt_response import RefinedPromptResponse -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawVersionsClient, RawVersionsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,7 +19,18 @@ class VersionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawVersionsClient + """ + return self._raw_client def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[PromptVersion]: """ @@ -53,24 +60,8 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[PromptVersion], - parse_obj_as( - type_=typing.List[PromptVersion], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, request_options=request_options) + return _response.data def create( self, @@ -135,41 +126,21 @@ def create( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="POST", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, + _response = self._raw_client.create( + id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + 
updated_at=updated_at, + organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -205,24 +176,8 @@ def get( version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, version_id, request_options=request_options) + return _response.data def delete(self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -255,18 +210,8 @@ def delete(self, id: int, version_id: int, *, request_options: typing.Optional[R version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, version_id, request_options=request_options) + return _response.data def update( self, @@ -336,41 +281,22 @@ def update( version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="PATCH", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, + _response = self._raw_client.update( + id, + version_id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def cost_estimate( self, @@ -420,28 +346,10 @@ def cost_estimate( project_subset=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", - 
method="POST", - params={ - "project_id": project_id, - "project_subset": project_subset, - }, - request_options=request_options, + _response = self._raw_client.cost_estimate( + prompt_id, version_id, project_id=project_id, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRunCostEstimate, - parse_obj_as( - type_=InferenceRunCostEstimate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_refined_prompt( self, @@ -486,27 +394,10 @@ def get_refined_prompt( refinement_job_id="refinement_job_id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="GET", - params={ - "refinement_job_id": refinement_job_id, - }, - request_options=request_options, + _response = self._raw_client.get_refined_prompt( + prompt_id, version_id, refinement_job_id=refinement_job_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def refine_prompt( self, @@ -562,41 +453,32 @@ def refine_prompt( version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="POST", - params={ - "async": async_, - }, - json={ - 
"teacher_model_provider_connection_id": teacher_model_provider_connection_id, - "teacher_model_name": teacher_model_name, - "project_id": project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.refine_prompt( + prompt_id, + version_id, + async_=async_, + teacher_model_provider_connection_id=teacher_model_provider_connection_id, + teacher_model_name=teacher_model_name, + project_id=project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncVersionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawVersionsClient + """ + return self._raw_client async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -636,24 +518,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[PromptVersion], - parse_obj_as( - type_=typing.List[PromptVersion], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(id, request_options=request_options) + return _response.data async def create( self, @@ -726,41 +592,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="POST", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, + _response = await self._raw_client.create( + id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + 
organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -804,24 +650,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, version_id, request_options=request_options) + return _response.data async def delete( self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -864,18 +694,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + _response = await self._raw_client.delete(id, version_id, request_options=request_options) + return _response.data async def update( self, @@ -953,41 +773,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="PATCH", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, + _response = await self._raw_client.update( + id, + version_id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def cost_estimate( self, @@ -1045,28 +846,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", - method="POST", - params={ - "project_id": project_id, - "project_subset": project_subset, - }, - request_options=request_options, + _response = await self._raw_client.cost_estimate( + prompt_id, version_id, project_id=project_id, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRunCostEstimate, - parse_obj_as( - type_=InferenceRunCostEstimate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_refined_prompt( self, @@ -1119,27 +902,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="GET", - params={ - "refinement_job_id": refinement_job_id, - }, - request_options=request_options, + _response = await self._raw_client.get_refined_prompt( + prompt_id, version_id, refinement_job_id=refinement_job_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def refine_prompt( self, @@ -1203,33 +969,13 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="POST", - params={ - "async": async_, - }, - json={ - "teacher_model_provider_connection_id": teacher_model_provider_connection_id, - "teacher_model_name": teacher_model_name, - "project_id": project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.refine_prompt( + prompt_id, + version_id, + async_=async_, + teacher_model_provider_connection_id=teacher_model_provider_connection_id, + teacher_model_name=teacher_model_name, + project_id=project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/prompts/versions/raw_client.py b/src/label_studio_sdk/prompts/versions/raw_client.py new file mode 100644 index 000000000..2892a6e79 --- /dev/null +++ b/src/label_studio_sdk/prompts/versions/raw_client.py @@ -0,0 +1,1008 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.inference_run_cost_estimate import InferenceRunCostEstimate +from ...types.prompt_version import PromptVersion +from ...types.prompt_version_created_by import PromptVersionCreatedBy +from ...types.prompt_version_organization import PromptVersionOrganization +from ...types.prompt_version_provider import PromptVersionProvider +from ...types.refined_prompt_response import RefinedPromptResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawVersionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[PromptVersion]]: + """ + Get a list of prompt versions. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[PromptVersion]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptVersion]: + """ + Create a new version of a prompt. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PromptVersion] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, version_id: int, 
*, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[PromptVersion]: + """ + Get a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PromptVersion] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete( + self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + Delete a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + version_id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptVersion]: + """ + Update a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PromptVersion] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="PATCH", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def cost_estimate( + self, + prompt_id: int, + version_id: int, + *, + project_id: int, + project_subset: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[InferenceRunCostEstimate]: + """ + Get cost estimate for running a prompt version on a particular project/subset + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + project_id : int + ID of the project to get an estimate for running on + + project_subset : int + Subset of the project to get an estimate for running on (e.g. 
'All', 'Sample', or 'HasGT') + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[InferenceRunCostEstimate] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", + method="POST", + params={ + "project_id": project_id, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get_refined_prompt( + self, + prompt_id: int, + version_id: int, + *, + refinement_job_id: str, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RefinedPromptResponse]: + """ + Get the refined prompt based on the `refinement_job_id`. + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + refinement_job_id : str + Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RefinedPromptResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="GET", + params={ + "refinement_job_id": refinement_job_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def refine_prompt( + self, + prompt_id: int, + version_id: int, + *, + async_: typing.Optional[bool] = None, + teacher_model_provider_connection_id: typing.Optional[int] = OMIT, + teacher_model_name: typing.Optional[str] = OMIT, + project_id: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RefinedPromptResponse]: + """ + Refine a prompt version using a teacher model and save the refined prompt as a new version. + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Base Prompt Version ID + + async_ : typing.Optional[bool] + Run the refinement job asynchronously + + teacher_model_provider_connection_id : typing.Optional[int] + Model Provider Connection ID to use to refine the prompt + + teacher_model_name : typing.Optional[str] + Name of the model to use to refine the prompt + + project_id : typing.Optional[int] + Project ID to target the refined prompt for + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RefinedPromptResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="POST", + params={ + "async": async_, + }, + json={ + "teacher_model_provider_connection_id": teacher_model_provider_connection_id, + "teacher_model_name": teacher_model_name, + "project_id": project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawVersionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[PromptVersion]]: + """ + Get a list of prompt versions. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[PromptVersion]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptVersion]: + """ + Create a new version of a prompt. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptVersion] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: 
int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[PromptVersion]: + """ + Get a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptVersion] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + version_id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptVersion]: + """ + Update a prompt version by ID. 
+ + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptVersion] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="PATCH", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) + + async def cost_estimate( + self, + prompt_id: int, + version_id: int, + *, + project_id: int, + project_subset: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[InferenceRunCostEstimate]: + """ + Get cost estimate for running a prompt version on a particular project/subset + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + project_id : int + ID of the project to get an estimate for running on + + project_subset : int + Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT') + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[InferenceRunCostEstimate] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", + method="POST", + params={ + "project_id": project_id, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get_refined_prompt( + self, + prompt_id: int, + version_id: int, + *, + refinement_job_id: str, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RefinedPromptResponse]: + """ + Get the refined prompt based on the `refinement_job_id`. 
+ + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + refinement_job_id : str + Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RefinedPromptResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="GET", + params={ + "refinement_job_id": refinement_job_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def refine_prompt( + self, + prompt_id: int, + version_id: int, + *, + async_: typing.Optional[bool] = None, + teacher_model_provider_connection_id: typing.Optional[int] = OMIT, + teacher_model_name: typing.Optional[str] = OMIT, + project_id: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RefinedPromptResponse]: + """ + Refine a prompt version using a teacher model and save the refined prompt as a new version. 
+ + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Base Prompt Version ID + + async_ : typing.Optional[bool] + Run the refinement job asynchronously + + teacher_model_provider_connection_id : typing.Optional[int] + Model Provider Connection ID to use to refine the prompt + + teacher_model_name : typing.Optional[str] + Name of the model to use to refine the prompt + + project_id : typing.Optional[int] + Project ID to target the refined prompt for + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RefinedPromptResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="POST", + params={ + "async": async_, + }, + json={ + "teacher_model_provider_connection_id": teacher_model_provider_connection_id, + "teacher_model_name": teacher_model_name, + "project_id": project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/tasks/__init__.py b/src/label_studio_sdk/tasks/__init__.py index 06c13c543..f5f953fad 100644 --- a/src/label_studio_sdk/tasks/__init__.py +++ b/src/label_studio_sdk/tasks/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import TasksListRequestFields, TasksListResponse __all__ = ["TasksListRequestFields", "TasksListResponse"] diff --git a/src/label_studio_sdk/tasks/client.py b/src/label_studio_sdk/tasks/client.py index 09b44df5f..1152d0300 100644 --- a/src/label_studio_sdk/tasks/client.py +++ b/src/label_studio_sdk/tasks/client.py @@ -1,21 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.pagination import AsyncPager, SyncPager from ..core.request_options import RequestOptions -from ..types.project_import import ProjectImport -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.tasks_list_request_fields import TasksListRequestFields -from ..core.pagination import SyncPager -from ..types.task import Task -from .types.tasks_list_response import TasksListResponse from ..types.base_task import BaseTask from ..types.data_manager_task_serializer import DataManagerTaskSerializer -from ..core.client_wrapper import AsyncClientWrapper -from ..core.pagination import AsyncPager +from ..types.project_import import ProjectImport +from ..types.task import Task +from .raw_client import AsyncRawTasksClient, RawTasksClient +from .types.tasks_list_request_fields import TasksListRequestFields # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,7 +18,18 @@ class TasksClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawTasksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawTasksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawTasksClient + """ + return self._raw_client def create_many_status( self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None @@ -65,24 +71,8 @@ def create_many_status( import_pk="import_pk", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectImport, - parse_obj_as( - type_=ProjectImport, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create_many_status(id, import_pk, request_options=request_options) + return _response.data def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -114,18 +104,8 @@ def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestO id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete_all_tasks(id, request_options=request_options) + return _response.data def list( self, @@ -205,51 +185,18 @@ def list( for page in response.iter_pages(): yield page """ - page = page if page is not None else 1 - _response = self._client_wrapper.httpx_client.request( - "api/tasks/", - method="GET", - params={ - "page": page, - "page_size": page_size, 
- "view": view, - "project": project, - "resolve_uri": resolve_uri, - "fields": fields, - "review": review, - "include": include, - "query": query, - }, + return self._raw_client.list( + page=page, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - TasksListResponse, - parse_obj_as( - type_=TasksListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - page=page + 1, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - _items = _parsed_response.tasks - return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -294,32 +241,8 @@ def create( project=1, ) """ - _response = self._client_wrapper.httpx_client.request( - "api/tasks/", - method="POST", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create(data=data, project=project, request_options=request_options) + return _response.data def get(self, id: str, *, 
request_options: typing.Optional[RequestOptions] = None) -> DataManagerTaskSerializer: """ @@ -351,24 +274,8 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - DataManagerTaskSerializer, - parse_obj_as( - type_=DataManagerTaskSerializer, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -402,18 +309,8 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -461,37 +358,24 @@ def update( project=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return 
typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data class AsyncTasksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawTasksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawTasksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawTasksClient + """ + return self._raw_client async def create_many_status( self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None @@ -541,24 +425,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectImport, - parse_obj_as( - type_=ProjectImport, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create_many_status(id, import_pk, request_options=request_options) + return _response.data async def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -598,18 +466,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete_all_tasks(id, request_options=request_options) + return _response.data async def list( self, @@ -690,6 +548,7 @@ async def main() -> None: response = await client.tasks.list() async for item in response: yield item + # alternatively, you can paginate page-by-page async for page in response.iter_pages(): yield page @@ -697,51 +556,18 @@ async def main() -> None: asyncio.run(main()) """ - page = page if page is not None else 1 - _response = await self._client_wrapper.httpx_client.request( - "api/tasks/", - method="GET", - params={ - "page": page, - "page_size": page_size, - "view": view, - "project": project, - "resolve_uri": resolve_uri, - "fields": fields, - "review": review, - "include": include, - "query": query, - }, + return await self._raw_client.list( + page=page, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - TasksListResponse, - parse_obj_as( - type_=TasksListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - page=page + 1, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - _items = _parsed_response.tasks - return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) - 
_response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -797,32 +623,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/tasks/", - method="POST", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create(data=data, project=project, request_options=request_options) + return _response.data async def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -864,24 +666,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - DataManagerTaskSerializer, - parse_obj_as( - type_=DataManagerTaskSerializer, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ 
-923,18 +709,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -993,29 +769,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/tasks/raw_client.py b/src/label_studio_sdk/tasks/raw_client.py new file mode 100644 index 000000000..152ab9860 --- /dev/null +++ b/src/label_studio_sdk/tasks/raw_client.py @@ -0,0 +1,816 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pagination import AsyncPager, BaseHttpResponse, SyncPager +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.base_task import BaseTask +from ..types.data_manager_task_serializer import DataManagerTaskSerializer +from ..types.project_import import ProjectImport +from ..types.task import Task +from .types.tasks_list_request_fields import TasksListRequestFields +from .types.tasks_list_response import TasksListResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawTasksClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create_many_status( + self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ProjectImport]: + """ + + Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. + + You will need the project ID and the unique ID of the import operation. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + The import ID is returned as part of the response when you call [Import tasks](import-tasks). + + Parameters + ---------- + id : int + The project ID. + + import_pk : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ProjectImport] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete_all_tasks( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete all tasks from a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, + *, + page: typing.Optional[int] = None, + page_size: typing.Optional[int] = None, + view: typing.Optional[int] = None, + project: typing.Optional[int] = None, + resolve_uri: typing.Optional[bool] = None, + fields: typing.Optional[TasksListRequestFields] = None, + review: typing.Optional[bool] = None, + include: typing.Optional[str] = None, + query: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> SyncPager[Task]: + """ + + Retrieve a list of tasks. + + You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). + + Parameters + ---------- + page : typing.Optional[int] + A page number within the paginated result set. + + page_size : typing.Optional[int] + Number of results to return per page. 
+ + view : typing.Optional[int] + View ID + + project : typing.Optional[int] + Project ID + + resolve_uri : typing.Optional[bool] + Resolve task data URIs using Cloud Storage + + fields : typing.Optional[TasksListRequestFields] + Set to "all" if you want to include annotations and predictions in the response + + review : typing.Optional[bool] + Get tasks for review + + include : typing.Optional[str] + Specify which fields to include in the response + + query : typing.Optional[str] + Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. + + * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
+ Example: `["completed_at"]` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SyncPager[Task] + List of Tasks + """ + page = page if page is not None else 1 + + _response = self._client_wrapper.httpx_client.request( + "api/tasks/", + method="GET", + params={ + "page": page, + "page_size": page_size, + "view": view, + "project": project, + "resolve_uri": resolve_uri, + "fields": fields, + "review": review, + "include": include, + "query": query, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) + _items = _parsed_response.tasks + _has_next = True + _get_next = lambda: self.list( + page=page + 1, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + return SyncPager( + has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseTask]: + """ + + Create a new labeling task in Label Studio. + + The data you provide depends on your labeling config and data type. + + You will also need to provide a project ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseTask] + Created task + """ + _response = self._client_wrapper.httpx_client.request( + "api/tasks/", + method="POST", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[DataManagerTaskSerializer]: + """ + + Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. + The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[DataManagerTaskSerializer] + Task + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a task in Label Studio. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + This action cannot be undone. + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: str, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseTask]: + """ + + Update the attributes of an existing labeling task. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[BaseTask] + Updated task + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawTasksClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create_many_status( + self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ProjectImport]: + """ + + Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. + + You will need the project ID and the unique ID of the import operation. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + The import ID is returned as part of the response when you call [Import tasks](import-tasks). + + Parameters + ---------- + id : int + The project ID. + + import_pk : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectImport] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete_all_tasks( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete all tasks from a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, + *, + page: typing.Optional[int] = None, + page_size: typing.Optional[int] = None, + view: typing.Optional[int] = None, + project: typing.Optional[int] = None, + resolve_uri: typing.Optional[bool] = None, + fields: typing.Optional[TasksListRequestFields] = None, + review: typing.Optional[bool] = None, + include: typing.Optional[str] = None, + query: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncPager[Task]: + """ + + Retrieve a list of tasks. + + You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). + + Parameters + ---------- + page : typing.Optional[int] + A page number within the paginated result set. + + page_size : typing.Optional[int] + Number of results to return per page. 
+ + view : typing.Optional[int] + View ID + + project : typing.Optional[int] + Project ID + + resolve_uri : typing.Optional[bool] + Resolve task data URIs using Cloud Storage + + fields : typing.Optional[TasksListRequestFields] + Set to "all" if you want to include annotations and predictions in the response + + review : typing.Optional[bool] + Get tasks for review + + include : typing.Optional[str] + Specify which fields to include in the response + + query : typing.Optional[str] + Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. + + * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
+ Example: `["completed_at"]` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncPager[Task] + List of Tasks + """ + page = page if page is not None else 1 + + _response = await self._client_wrapper.httpx_client.request( + "api/tasks/", + method="GET", + params={ + "page": page, + "page_size": page_size, + "view": view, + "project": project, + "resolve_uri": resolve_uri, + "fields": fields, + "review": review, + "include": include, + "query": query, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) + _items = _parsed_response.tasks + _has_next = True + + async def _get_next(): + return await self.list( + page=page + 1, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + + return AsyncPager( + has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseTask]: + """ + + Create a new labeling task in Label Studio. + + The data you provide depends on your labeling config and data type. + + You will also need to provide a project ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseTask] + Created task + """ + _response = await self._client_wrapper.httpx_client.request( + "api/tasks/", + method="POST", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[DataManagerTaskSerializer]: + """ + + Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. + The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[DataManagerTaskSerializer] + Task + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a task in Label Studio. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + This action cannot be undone. + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: str, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseTask]: + """ + + Update the attributes of an existing labeling task. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[BaseTask] + Updated task + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/tasks/types/__init__.py b/src/label_studio_sdk/tasks/types/__init__.py index bba1de8a3..436b4e548 100644 --- a/src/label_studio_sdk/tasks/types/__init__.py +++ b/src/label_studio_sdk/tasks/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .tasks_list_request_fields import TasksListRequestFields from .tasks_list_response import TasksListResponse diff --git a/src/label_studio_sdk/tasks/types/tasks_list_response.py b/src/label_studio_sdk/tasks/types/tasks_list_response.py index c8d9e0240..00d717374 100644 --- a/src/label_studio_sdk/tasks/types/tasks_list_response.py +++ b/src/label_studio_sdk/tasks/types/tasks_list_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...types.task import Task + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...types.task import Task class TasksListResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/tokens/__init__.py b/src/label_studio_sdk/tokens/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/tokens/__init__.py +++ b/src/label_studio_sdk/tokens/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/tokens/client.py b/src/label_studio_sdk/tokens/client.py index 4b0308939..0191ade07 100644 --- a/src/label_studio_sdk/tokens/client.py +++ b/src/label_studio_sdk/tokens/client.py @@ -1,18 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from ..errors.not_found_error import NotFoundError -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..types.api_token_response import ApiTokenResponse from ..types.access_token_response import AccessTokenResponse -from ..errors.unauthorized_error import UnauthorizedError +from ..types.api_token_response import ApiTokenResponse from ..types.rotate_token_response import RotateTokenResponse -from ..errors.bad_request_error import BadRequestError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawTokensClient, RawTokensClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -20,7 +15,18 @@ class TokensClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawTokensClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawTokensClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawTokensClient + """ + return self._raw_client def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -49,35 +55,8 @@ def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOpt refresh="refresh", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/token/blacklist", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.blacklist(refresh=refresh, request_options=request_options) + return _response.data def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ApiTokenResponse]: """ @@ -102,24 +81,8 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typ ) client.tokens.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ApiTokenResponse], - parse_obj_as( - 
type_=typing.List[ApiTokenResponse], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(request_options=request_options) + return _response.data def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> ApiTokenResponse: """ @@ -144,24 +107,8 @@ def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> ) client.tokens.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/token", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ApiTokenResponse, - parse_obj_as( - type_=ApiTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create(request_options=request_options) + return _response.data def refresh(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> AccessTokenResponse: """ @@ -191,41 +138,8 @@ def refresh(self, *, refresh: str, request_options: typing.Optional[RequestOptio refresh="refresh", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/token/refresh", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AccessTokenResponse, - parse_obj_as( - type_=AccessTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 401: - raise UnauthorizedError( - typing.cast( - 
typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.refresh(refresh=refresh, request_options=request_options) + return _response.data def rotate(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> RotateTokenResponse: """ @@ -255,46 +169,24 @@ def rotate(self, *, refresh: str, request_options: typing.Optional[RequestOption refresh="refresh", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/token/rotate", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RotateTokenResponse, - parse_obj_as( - type_=RotateTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.rotate(refresh=refresh, request_options=request_options) + return _response.data class AsyncTokensClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawTokensClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawTokensClient: + """ + Retrieves a raw implementation of this client that 
returns raw responses. + + Returns + ------- + AsyncRawTokensClient + """ + return self._raw_client async def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -331,35 +223,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/blacklist", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.blacklist(refresh=refresh, request_options=request_options) + return _response.data async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ApiTokenResponse]: """ @@ -392,24 +257,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ApiTokenResponse], - parse_obj_as( - type_=typing.List[ApiTokenResponse], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(request_options=request_options) + return _response.data async def create(self, *, 
request_options: typing.Optional[RequestOptions] = None) -> ApiTokenResponse: """ @@ -442,24 +291,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ApiTokenResponse, - parse_obj_as( - type_=ApiTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create(request_options=request_options) + return _response.data async def refresh( self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None @@ -499,41 +332,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/refresh", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AccessTokenResponse, - parse_obj_as( - type_=AccessTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 401: - raise UnauthorizedError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.refresh(refresh=refresh, request_options=request_options) + return _response.data async def rotate( self, *, refresh: str, request_options: 
typing.Optional[RequestOptions] = None @@ -573,38 +373,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/rotate", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RotateTokenResponse, - parse_obj_as( - type_=RotateTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.rotate(refresh=refresh, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/tokens/raw_client.py b/src/label_studio_sdk/tokens/raw_client.py new file mode 100644 index 000000000..a68f07754 --- /dev/null +++ b/src/label_studio_sdk/tokens/raw_client.py @@ -0,0 +1,495 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.bad_request_error import BadRequestError +from ..errors.not_found_error import NotFoundError +from ..errors.unauthorized_error import UnauthorizedError +from ..types.access_token_response import AccessTokenResponse +from ..types.api_token_response import ApiTokenResponse +from ..types.rotate_token_response import RotateTokenResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawTokensClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Blacklist a refresh token to prevent its future use. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/token/blacklist", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ApiTokenResponse]]: + """ + List all API tokens for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[ApiTokenResponse]] + List of API tokens retrieved successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ApiTokenResponse], + parse_obj_as( + type_=typing.List[ApiTokenResponse], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[ApiTokenResponse]: + """ + Create a new API token for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ApiTokenResponse] + Token created successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/token", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiTokenResponse, + parse_obj_as( + type_=ApiTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def refresh( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AccessTokenResponse]: + """ + Get a new access token, using a refresh token. 
+ + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AccessTokenResponse] + New access token created successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/token/refresh", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AccessTokenResponse, + parse_obj_as( + type_=AccessTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def rotate( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RotateTokenResponse]: + """ + Blacklist existing refresh token, and get a new refresh token. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RotateTokenResponse] + Refresh token successfully rotated + """ + _response = self._client_wrapper.httpx_client.request( + "api/token/rotate", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RotateTokenResponse, + parse_obj_as( + type_=RotateTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawTokensClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def blacklist( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Blacklist a refresh token to prevent its future use. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token/blacklist", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[ApiTokenResponse]]: + """ + List all API tokens for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[ApiTokenResponse]] + List of API tokens retrieved successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ApiTokenResponse], + parse_obj_as( + type_=typing.List[ApiTokenResponse], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ApiTokenResponse]: + """ + Create a new API token for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ApiTokenResponse] + Token created successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiTokenResponse, + parse_obj_as( + type_=ApiTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def refresh( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AccessTokenResponse]: + """ + Get a new access token, using a refresh token. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AccessTokenResponse] + New access token created successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token/refresh", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AccessTokenResponse, + parse_obj_as( + type_=AccessTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def rotate( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RotateTokenResponse]: + """ + Blacklist existing refresh token, and get a new refresh token. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[RotateTokenResponse] + Refresh token successfully rotated + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token/rotate", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RotateTokenResponse, + parse_obj_as( + type_=RotateTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/types/__init__.py b/src/label_studio_sdk/types/__init__.py index 6fe7e2249..88988e3a4 100644 --- a/src/label_studio_sdk/types/__init__.py +++ b/src/label_studio_sdk/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .access_token_response import AccessTokenResponse from .annotation import Annotation from .annotation_completed_by import AnnotationCompletedBy diff --git a/src/label_studio_sdk/types/access_token_response.py b/src/label_studio_sdk/types/access_token_response.py index 3c55d97da..57df75713 100644 --- a/src/label_studio_sdk/types/access_token_response.py +++ b/src/label_studio_sdk/types/access_token_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class AccessTokenResponse(UniversalBaseModel): access: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/annotation.py b/src/label_studio_sdk/types/annotation.py index 9463e39b2..e27a951a7 100644 --- a/src/label_studio_sdk/types/annotation.py +++ b/src/label_studio_sdk/types/annotation.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotation_completed_by import AnnotationCompletedBy -import datetime as dt from .annotation_last_action import AnnotationLastAction -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Annotation(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/annotation_completed_by.py b/src/label_studio_sdk/types/annotation_completed_by.py index b9e7caf52..f3f4a635e 100644 --- a/src/label_studio_sdk/types/annotation_completed_by.py +++ b/src/label_studio_sdk/types/annotation_completed_by.py @@ -1,6 +1,7 @@ # This file was auto-generated by Fern from our API Definition. import typing + from .user_simple import UserSimple AnnotationCompletedBy = typing.Union[UserSimple, int] diff --git a/src/label_studio_sdk/types/annotation_filter_options.py b/src/label_studio_sdk/types/annotation_filter_options.py index 3f00e64ba..3e3f86ed4 100644 --- a/src/label_studio_sdk/types/annotation_filter_options.py +++ b/src/label_studio_sdk/types/annotation_filter_options.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AnnotationFilterOptions(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/annotations_dm_field.py b/src/label_studio_sdk/types/annotations_dm_field.py index 114de210d..0aa2976f4 100644 --- a/src/label_studio_sdk/types/annotations_dm_field.py +++ b/src/label_studio_sdk/types/annotations_dm_field.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotations_dm_field_last_action import AnnotationsDmFieldLastAction -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AnnotationsDmField(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/api_token_response.py b/src/label_studio_sdk/types/api_token_response.py index 72d4fddd3..5f7ab74bd 100644 --- a/src/label_studio_sdk/types/api_token_response.py +++ b/src/label_studio_sdk/types/api_token_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class ApiTokenResponse(UniversalBaseModel): token: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/azure_blob_export_storage.py b/src/label_studio_sdk/types/azure_blob_export_storage.py index 83394ea4d..20500cf84 100644 --- a/src/label_studio_sdk/types/azure_blob_export_storage.py +++ b/src/label_studio_sdk/types/azure_blob_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .azure_blob_export_storage_status import AzureBlobExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AzureBlobExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/azure_blob_import_storage.py b/src/label_studio_sdk/types/azure_blob_import_storage.py index 3de9b873b..c5f3648b0 100644 --- a/src/label_studio_sdk/types/azure_blob_import_storage.py +++ b/src/label_studio_sdk/types/azure_blob_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .azure_blob_import_storage_status import AzureBlobImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AzureBlobImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/base_task.py b/src/label_studio_sdk/types/base_task.py index 3e8251862..a06228dd3 100644 --- a/src/label_studio_sdk/types/base_task.py +++ b/src/label_studio_sdk/types/base_task.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt -from .base_task_updated_by import BaseTaskUpdatedBy +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .base_task_file_upload import BaseTaskFileUpload -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .base_task_updated_by import BaseTaskUpdatedBy class BaseTask(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/base_user.py b/src/label_studio_sdk/types/base_user.py index 6edb03619..ef14b7fa2 100644 --- a/src/label_studio_sdk/types/base_user.py +++ b/src/label_studio_sdk/types/base_user.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class BaseUser(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/comment.py b/src/label_studio_sdk/types/comment.py index 5f48c133b..f52785a71 100644 --- a/src/label_studio_sdk/types/comment.py +++ b/src/label_studio_sdk/types/comment.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -from .comment_created_by import CommentCreatedBy import datetime as dt import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .comment_created_by import CommentCreatedBy class Comment(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/converted_format.py b/src/label_studio_sdk/types/converted_format.py index bc0bf56aa..70b6583de 100644 --- a/src/label_studio_sdk/types/converted_format.py +++ b/src/label_studio_sdk/types/converted_format.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from .converted_format_status import ConvertedFormatStatus + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .converted_format_status import ConvertedFormatStatus class ConvertedFormat(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer.py b/src/label_studio_sdk/types/data_manager_task_serializer.py index ca9f9c7ea..e9dcf9447 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .data_manager_task_serializer_predictions_item import DataManagerTaskSerializerPredictionsItem + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotations_dm_field import AnnotationsDmField -from .data_manager_task_serializer_drafts_item import DataManagerTaskSerializerDraftsItem from .data_manager_task_serializer_annotators_item import DataManagerTaskSerializerAnnotatorsItem -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .data_manager_task_serializer_drafts_item import DataManagerTaskSerializerDraftsItem +from .data_manager_task_serializer_predictions_item import DataManagerTaskSerializerPredictionsItem class DataManagerTaskSerializer(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py index 792c89c46..8d334b7b4 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py @@ -1,10 +1,10 @@ # This file was 
auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class DataManagerTaskSerializerDraftsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py index 8b01227e4..0336ae0bd 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class DataManagerTaskSerializerPredictionsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/export.py b/src/label_studio_sdk/types/export.py index 1d7f45038..c1c80f164 100644 --- a/src/label_studio_sdk/types/export.py +++ b/src/label_studio_sdk/types/export.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing -from .user_simple import UserSimple import datetime as dt +import typing + import pydantic -from .export_status import ExportStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .converted_format import ConvertedFormat -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .export_status import ExportStatus +from .user_simple import UserSimple class Export(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/export_snapshot.py b/src/label_studio_sdk/types/export_snapshot.py index 4f23e9996..afc7920e6 100644 --- a/src/label_studio_sdk/types/export_snapshot.py +++ b/src/label_studio_sdk/types/export_snapshot.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing -from .user_simple import UserSimple import datetime as dt +import typing + import pydantic -from .export_snapshot_status import ExportSnapshotStatus -from .converted_format import ConvertedFormat -from .task_filter_options import TaskFilterOptions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotation_filter_options import AnnotationFilterOptions +from .converted_format import ConvertedFormat +from .export_snapshot_status import ExportSnapshotStatus from .serialization_options import SerializationOptions -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .task_filter_options import TaskFilterOptions +from .user_simple import UserSimple class ExportSnapshot(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/file_upload.py b/src/label_studio_sdk/types/file_upload.py index 8fcd31f62..c4320edaf 100644 --- a/src/label_studio_sdk/types/file_upload.py +++ b/src/label_studio_sdk/types/file_upload.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class FileUpload(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/filter.py b/src/label_studio_sdk/types/filter.py index c5e37fa4d..c301db612 100644 --- a/src/label_studio_sdk/types/filter.py +++ b/src/label_studio_sdk/types/filter.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class Filter(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/filter_group.py b/src/label_studio_sdk/types/filter_group.py index 626b8a439..7dc87b712 100644 --- a/src/label_studio_sdk/types/filter_group.py +++ b/src/label_studio_sdk/types/filter_group.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .filter import Filter + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .filter import Filter class FilterGroup(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_export_storage.py b/src/label_studio_sdk/types/gcs_export_storage.py index 399102266..df5cd7189 100644 --- a/src/label_studio_sdk/types/gcs_export_storage.py +++ b/src/label_studio_sdk/types/gcs_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .gcs_export_storage_status import GcsExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class GcsExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_import_storage.py b/src/label_studio_sdk/types/gcs_import_storage.py index ee406e985..269b48fd0 100644 --- a/src/label_studio_sdk/types/gcs_import_storage.py +++ b/src/label_studio_sdk/types/gcs_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .gcs_import_storage_status import GcsImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class GcsImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/inference_run.py b/src/label_studio_sdk/types/inference_run.py index b6837d9cc..6a1e5d6f5 100644 --- a/src/label_studio_sdk/types/inference_run.py +++ b/src/label_studio_sdk/types/inference_run.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .inference_run_organization import InferenceRunOrganization + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .inference_run_created_by import InferenceRunCreatedBy +from .inference_run_organization import InferenceRunOrganization from .inference_run_project_subset import InferenceRunProjectSubset from .inference_run_status import InferenceRunStatus -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 -import pydantic class InferenceRun(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/inference_run_cost_estimate.py b/src/label_studio_sdk/types/inference_run_cost_estimate.py index 103f975c3..531292f47 100644 --- a/src/label_studio_sdk/types/inference_run_cost_estimate.py +++ b/src/label_studio_sdk/types/inference_run_cost_estimate.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class InferenceRunCostEstimate(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/jwt_settings_response.py b/src/label_studio_sdk/types/jwt_settings_response.py index a2c1fb95a..99e431d8f 100644 --- a/src/label_studio_sdk/types/jwt_settings_response.py +++ b/src/label_studio_sdk/types/jwt_settings_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class JwtSettingsResponse(UniversalBaseModel): api_tokens_enabled: bool = pydantic.Field() diff --git a/src/label_studio_sdk/types/key_indicator_value.py b/src/label_studio_sdk/types/key_indicator_value.py index 291358e18..116d4d2f6 100644 --- a/src/label_studio_sdk/types/key_indicator_value.py +++ b/src/label_studio_sdk/types/key_indicator_value.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class KeyIndicatorValue(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators.py b/src/label_studio_sdk/types/key_indicators.py index 23e5b71de..d03c3cd18 100644 --- a/src/label_studio_sdk/types/key_indicators.py +++ b/src/label_studio_sdk/types/key_indicators.py @@ -1,6 +1,7 @@ # This file was auto-generated by Fern from our API Definition. import typing + from .key_indicators_item import KeyIndicatorsItem KeyIndicators = typing.List[KeyIndicatorsItem] diff --git a/src/label_studio_sdk/types/key_indicators_item.py b/src/label_studio_sdk/types/key_indicators_item.py index 2f6a26c70..bc5dc63a6 100644 --- a/src/label_studio_sdk/types/key_indicators_item.py +++ b/src/label_studio_sdk/types/key_indicators_item.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .key_indicators_item_additional_kpis_item import KeyIndicatorsItemAdditionalKpisItem from .key_indicators_item_extra_kpis_item import KeyIndicatorsItemExtraKpisItem -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class KeyIndicatorsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py index a0e1b06ca..ea89a9020 100644 --- a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class KeyIndicatorsItemAdditionalKpisItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py index 9e539bc1d..fe5f23248 100644 --- a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class KeyIndicatorsItemExtraKpisItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_export_storage.py b/src/label_studio_sdk/types/local_files_export_storage.py index fffaaaa84..596487c0d 100644 --- a/src/label_studio_sdk/types/local_files_export_storage.py +++ b/src/label_studio_sdk/types/local_files_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .local_files_export_storage_status import LocalFilesExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class LocalFilesExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_import_storage.py b/src/label_studio_sdk/types/local_files_import_storage.py index 57240a844..5a0d70f93 100644 --- a/src/label_studio_sdk/types/local_files_import_storage.py +++ b/src/label_studio_sdk/types/local_files_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .local_files_import_storage_status import LocalFilesImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class LocalFilesImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/ml_backend.py b/src/label_studio_sdk/types/ml_backend.py index 21fd41e90..067326c1e 100644 --- a/src/label_studio_sdk/types/ml_backend.py +++ b/src/label_studio_sdk/types/ml_backend.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .ml_backend_state import MlBackendState + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_backend_auth_method import MlBackendAuthMethod -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .ml_backend_state import MlBackendState class MlBackend(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/model_provider_connection.py b/src/label_studio_sdk/types/model_provider_connection.py index 517877bde..2d624ef77 100644 --- a/src/label_studio_sdk/types/model_provider_connection.py +++ b/src/label_studio_sdk/types/model_provider_connection.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -from .model_provider_connection_provider import ModelProviderConnectionProvider -import typing -from .model_provider_connection_scope import ModelProviderConnectionScope -from .model_provider_connection_organization import ModelProviderConnectionOrganization -from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy import datetime as dt +import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from .model_provider_connection_organization import ModelProviderConnectionOrganization +from .model_provider_connection_provider import ModelProviderConnectionProvider +from .model_provider_connection_scope import ModelProviderConnectionScope class ModelProviderConnection(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/pause.py b/src/label_studio_sdk/types/pause.py index 7c9af1c99..74f5f7829 100644 --- a/src/label_studio_sdk/types/pause.py +++ b/src/label_studio_sdk/types/pause.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .pause_paused_by import PausePausedBy + import pydantic -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .pause_paused_by import PausePausedBy class Pause(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prediction.py b/src/label_studio_sdk/types/prediction.py index efb00f16b..797207464 100644 --- a/src/label_studio_sdk/types/prediction.py +++ b/src/label_studio_sdk/types/prediction.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class Prediction(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project.py b/src/label_studio_sdk/types/project.py index 0c78fddeb..d35c7ffa0 100644 --- a/src/label_studio_sdk/types/project.py +++ b/src/label_studio_sdk/types/project.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -from .prompt import Prompt -from .user_simple import UserSimple -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .project_sampling import ProjectSampling from .project_skip_queue import ProjectSkipQueue -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .prompt import Prompt +from .user_simple import UserSimple class Project(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project_import.py b/src/label_studio_sdk/types/project_import.py index 331fd485a..9874942a7 100644 --- a/src/label_studio_sdk/types/project_import.py +++ b/src/label_studio_sdk/types/project_import.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing -from .project_import_status import ProjectImportStatus import datetime as dt +import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .project_import_status import ProjectImportStatus class ProjectImport(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project_label_config.py b/src/label_studio_sdk/types/project_label_config.py index 443fbb86e..5b21c6e3b 100644 --- a/src/label_studio_sdk/types/project_label_config.py +++ b/src/label_studio_sdk/types/project_label_config.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class ProjectLabelConfig(UniversalBaseModel): label_config: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/prompt.py b/src/label_studio_sdk/types/prompt.py index 81c04e8c3..ad2173741 100644 --- a/src/label_studio_sdk/types/prompt.py +++ b/src/label_studio_sdk/types/prompt.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import pydantic +import datetime as dt import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .prompt_associated_projects_item import PromptAssociatedProjectsItem from .prompt_created_by import PromptCreatedBy -import datetime as dt from .prompt_organization import PromptOrganization -from .prompt_associated_projects_item import PromptAssociatedProjectsItem -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Prompt(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_associated_projects_item.py b/src/label_studio_sdk/types/prompt_associated_projects_item.py index 05ad2f37c..3b1bd38e7 100644 --- a/src/label_studio_sdk/types/prompt_associated_projects_item.py +++ b/src/label_studio_sdk/types/prompt_associated_projects_item.py @@ -1,6 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
import typing + from .prompt_associated_projects_item_id import PromptAssociatedProjectsItemId PromptAssociatedProjectsItem = typing.Union[int, PromptAssociatedProjectsItemId] diff --git a/src/label_studio_sdk/types/prompt_associated_projects_item_id.py b/src/label_studio_sdk/types/prompt_associated_projects_item_id.py index 01c5c53c6..aad441f74 100644 --- a/src/label_studio_sdk/types/prompt_associated_projects_item_id.py +++ b/src/label_studio_sdk/types/prompt_associated_projects_item_id.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptAssociatedProjectsItemId(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_version.py b/src/label_studio_sdk/types/prompt_version.py index 38f317b13..0058662f4 100644 --- a/src/label_studio_sdk/types/prompt_version.py +++ b/src/label_studio_sdk/types/prompt_version.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .prompt_version_provider import PromptVersionProvider + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .prompt_version_created_by import PromptVersionCreatedBy -import datetime as dt from .prompt_version_organization import PromptVersionOrganization -from ..core.pydantic_utilities import IS_PYDANTIC_V2 -import pydantic +from .prompt_version_provider import PromptVersionProvider class PromptVersion(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/redis_export_storage.py b/src/label_studio_sdk/types/redis_export_storage.py index 49d816584..e99a9d5c5 100644 --- a/src/label_studio_sdk/types/redis_export_storage.py +++ b/src/label_studio_sdk/types/redis_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .redis_export_storage_status import RedisExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class RedisExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/redis_import_storage.py b/src/label_studio_sdk/types/redis_import_storage.py index 0790e279c..a80604fee 100644 --- a/src/label_studio_sdk/types/redis_import_storage.py +++ b/src/label_studio_sdk/types/redis_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .redis_import_storage_status import RedisImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class RedisImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/refined_prompt_response.py b/src/label_studio_sdk/types/refined_prompt_response.py index c4b20989f..728f5ff81 100644 --- a/src/label_studio_sdk/types/refined_prompt_response.py +++ b/src/label_studio_sdk/types/refined_prompt_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from .refined_prompt_response_refinement_status import RefinedPromptResponseRefinementStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .prompt_version import PromptVersion -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .refined_prompt_response_refinement_status import RefinedPromptResponseRefinementStatus class RefinedPromptResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/rotate_token_response.py b/src/label_studio_sdk/types/rotate_token_response.py index 81e404eb6..b71032dae 100644 --- a/src/label_studio_sdk/types/rotate_token_response.py +++ b/src/label_studio_sdk/types/rotate_token_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class RotateTokenResponse(UniversalBaseModel): refresh: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/s3export_storage.py b/src/label_studio_sdk/types/s3export_storage.py index ed4e36389..a658136dd 100644 --- a/src/label_studio_sdk/types/s3export_storage.py +++ b/src/label_studio_sdk/types/s3export_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from .s3export_storage_status import S3ExportStorageStatus import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .s3export_storage_status import S3ExportStorageStatus class S3ExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3import_storage.py b/src/label_studio_sdk/types/s3import_storage.py index dc713fc85..87faf912f 100644 --- a/src/label_studio_sdk/types/s3import_storage.py +++ b/src/label_studio_sdk/types/s3import_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from .s3import_storage_status import S3ImportStorageStatus import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .s3import_storage_status import S3ImportStorageStatus class S3ImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3s_export_storage.py b/src/label_studio_sdk/types/s3s_export_storage.py index d7e1d616f..8e63ac2a3 100644 --- a/src/label_studio_sdk/types/s3s_export_storage.py +++ b/src/label_studio_sdk/types/s3s_export_storage.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class S3SExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3s_import_storage.py b/src/label_studio_sdk/types/s3s_import_storage.py index 1362a231c..610b726a8 100644 --- a/src/label_studio_sdk/types/s3s_import_storage.py +++ b/src/label_studio_sdk/types/s3s_import_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from .s3s_import_storage_status import S3SImportStorageStatus import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .s3s_import_storage_status import S3SImportStorageStatus class S3SImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/serialization_option.py b/src/label_studio_sdk/types/serialization_option.py index 347950cba..961b0809a 100644 --- a/src/label_studio_sdk/types/serialization_option.py +++ b/src/label_studio_sdk/types/serialization_option.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class SerializationOption(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/serialization_options.py b/src/label_studio_sdk/types/serialization_options.py index 164de5ce3..08d9f9655 100644 --- a/src/label_studio_sdk/types/serialization_options.py +++ b/src/label_studio_sdk/types/serialization_options.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from .serialization_option import SerializationOption + import pydantic import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .serialization_option import SerializationOption class SerializationOptions(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/task.py b/src/label_studio_sdk/types/task.py index a0965e4fc..7e3d143f2 100644 --- a/src/label_studio_sdk/types/task.py +++ b/src/label_studio_sdk/types/task.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .task_annotators_item import TaskAnnotatorsItem -import datetime as dt from .task_comment_authors_item import TaskCommentAuthorsItem -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Task(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/task_filter_options.py b/src/label_studio_sdk/types/task_filter_options.py index 04b817727..023cce091 100644 --- a/src/label_studio_sdk/types/task_filter_options.py +++ b/src/label_studio_sdk/types/task_filter_options.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class TaskFilterOptions(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/user_simple.py b/src/label_studio_sdk/types/user_simple.py index ff7b3e16c..0d258fbcd 100644 --- a/src/label_studio_sdk/types/user_simple.py +++ b/src/label_studio_sdk/types/user_simple.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class UserSimple(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/view.py b/src/label_studio_sdk/types/view.py index 1713377c4..ea1104e21 100644 --- a/src/label_studio_sdk/types/view.py +++ b/src/label_studio_sdk/types/view.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .filter_group import FilterGroup + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .filter_group import FilterGroup class View(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/webhook.py b/src/label_studio_sdk/types/webhook.py index 569486be6..ad0fa4264 100644 --- a/src/label_studio_sdk/types/webhook.py +++ b/src/label_studio_sdk/types/webhook.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .webhook_actions_item import WebhookActionsItem -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Webhook(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/webhook_serializer_for_update.py b/src/label_studio_sdk/types/webhook_serializer_for_update.py index b257c3910..782886d5f 100644 --- a/src/label_studio_sdk/types/webhook_serializer_for_update.py +++ b/src/label_studio_sdk/types/webhook_serializer_for_update.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class WebhookSerializerForUpdate(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/workspace.py b/src/label_studio_sdk/types/workspace.py index e0adc186c..bda30027f 100644 --- a/src/label_studio_sdk/types/workspace.py +++ b/src/label_studio_sdk/types/workspace.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class Workspace(UniversalBaseModel): diff --git a/src/label_studio_sdk/users/__init__.py b/src/label_studio_sdk/users/__init__.py index 92fd561f2..3c70756d1 100644 --- a/src/label_studio_sdk/users/__init__.py +++ b/src/label_studio_sdk/users/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import UsersGetTokenResponse, UsersResetTokenResponse __all__ = ["UsersGetTokenResponse", "UsersResetTokenResponse"] diff --git a/src/label_studio_sdk/users/client.py b/src/label_studio_sdk/users/client.py index 62618163a..a6733ce9a 100644 --- a/src/label_studio_sdk/users/client.py +++ b/src/label_studio_sdk/users/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from .types.users_reset_token_response import UsersResetTokenResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.users_get_token_response import UsersGetTokenResponse from ..types.base_user import BaseUser -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawUsersClient, RawUsersClient +from .types.users_get_token_response import UsersGetTokenResponse +from .types.users_reset_token_response import UsersResetTokenResponse # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,7 +15,18 @@ class UsersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawUsersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawUsersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawUsersClient + """ + return self._raw_client def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersResetTokenResponse: """ @@ -43,24 +51,8 @@ def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None ) client.users.reset_token() """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersResetTokenResponse, - parse_obj_as( - type_=UsersResetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.reset_token(request_options=request_options) + return _response.data def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersGetTokenResponse: """ @@ -85,24 +77,8 @@ def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) ) client.users.get_token() """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersGetTokenResponse, - parse_obj_as( - type_=UsersGetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get_token(request_options=request_options) + return _response.data def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -127,24 +103,8 @@ def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> ) client.users.whoami() """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/whoami", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.whoami(request_options=request_options) + return _response.data def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ @@ -170,24 +130,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.users.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/users/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[BaseUser], - parse_obj_as( - type_=typing.List[BaseUser], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -253,39 +197,19 @@ def create( ) 
client.users.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/users/", - method="POST", - json={ - "id": id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + id=id, + first_name=first_name, + last_name=last_name, + username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -317,24 +241,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> 
None: """ @@ -368,18 +276,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -453,44 +351,36 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "id": users_update_request_id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + users_update_request_id=users_update_request_id, + first_name=first_name, + last_name=last_name, + username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncUsersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = 
AsyncRawUsersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawUsersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawUsersClient + """ + return self._raw_client async def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersResetTokenResponse: """ @@ -523,24 +413,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersResetTokenResponse, - parse_obj_as( - type_=UsersResetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.reset_token(request_options=request_options) + return _response.data async def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersGetTokenResponse: """ @@ -573,24 +447,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersGetTokenResponse, - parse_obj_as( - type_=UsersGetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get_token(request_options=request_options) + return _response.data async def whoami(self, *, 
request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -623,24 +481,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/whoami", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.whoami(request_options=request_options) + return _response.data async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ @@ -674,24 +516,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/users/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[BaseUser], - parse_obj_as( - type_=typing.List[BaseUser], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -765,39 +591,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/users/", - method="POST", - json={ - "id": id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, 
- }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + id=id, + first_name=first_name, + last_name=last_name, + username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -837,24 +643,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -896,18 +686,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -989,36 +769,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "id": users_update_request_id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + users_update_request_id=users_update_request_id, + first_name=first_name, + last_name=last_name, + username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/users/raw_client.py b/src/label_studio_sdk/users/raw_client.py new file mode 100644 index 000000000..323c86805 --- /dev/null +++ b/src/label_studio_sdk/users/raw_client.py @@ -0,0 +1,833 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.base_user import BaseUser +from .types.users_get_token_response import UsersGetTokenResponse +from .types.users_reset_token_response import UsersResetTokenResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawUsersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def reset_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[UsersResetTokenResponse]: + """ + Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[UsersResetTokenResponse] + User token response + """ + _response = self._client_wrapper.httpx_client.request( + "api/current-user/reset-token/", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[UsersGetTokenResponse]: + """ + Get a access token to authenticate to the API as the current user. To find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[UsersGetTokenResponse] + User token response + """ + _response = self._client_wrapper.httpx_client.request( + "api/current-user/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[BaseUser]: + """ + Get information about your user account, such as your username, email, and user ID. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/current-user/whoami", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[BaseUser]]: + """ + + List all users in your Label Studio organization. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[BaseUser]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/users/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseUser]: + """ + + Create a user in Label Studio. 
+ + Parameters + ---------- + id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/users/", + method="POST", + json={ + "id": id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[BaseUser]: + """ + + Get info about a specific Label Studio user. + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Label Studio user. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + + Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. + + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + users_update_request_id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseUser]: + """ + + Update details for a specific Label Studio user, such as their name or contact information. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + users_update_request_id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "id": users_update_request_id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawUsersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def reset_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[UsersResetTokenResponse]: + 
""" + Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[UsersResetTokenResponse] + User token response + """ + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/reset-token/", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[UsersGetTokenResponse]: + """ + Get a access token to authenticate to the API as the current user. To find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[UsersGetTokenResponse] + User token response + """ + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[BaseUser]: + """ + Get information about your user account, such as your username, email, and user ID. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/whoami", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[BaseUser]]: + """ + + List all users in your Label Studio organization. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[BaseUser]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/users/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseUser]: + """ + + Create a user in Label Studio. 
+ + Parameters + ---------- + id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/users/", + method="POST", + json={ + "id": id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[BaseUser]: + """ + + Get info about a specific Label Studio user. + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Label Studio user. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + + Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. + + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + users_update_request_id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseUser]: + """ + + Update details for a specific Label Studio user, such as their name or contact information. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + users_update_request_id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "id": users_update_request_id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/users/types/__init__.py b/src/label_studio_sdk/users/types/__init__.py index 69c55eb33..4aa8d7683 100644 --- a/src/label_studio_sdk/users/types/__init__.py +++ b/src/label_studio_sdk/users/types/__init__.py @@ -1,5 +1,7 @@ # This 
file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .users_get_token_response import UsersGetTokenResponse from .users_reset_token_response import UsersResetTokenResponse diff --git a/src/label_studio_sdk/users/types/users_get_token_response.py b/src/label_studio_sdk/users/types/users_get_token_response.py index 815096b07..fe19030d1 100644 --- a/src/label_studio_sdk/users/types/users_get_token_response.py +++ b/src/label_studio_sdk/users/types/users_get_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class UsersGetTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/users/types/users_reset_token_response.py b/src/label_studio_sdk/users/types/users_reset_token_response.py index 0bdf5a8fe..a038930ce 100644 --- a/src/label_studio_sdk/users/types/users_reset_token_response.py +++ b/src/label_studio_sdk/users/types/users_reset_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class UsersResetTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/versions/__init__.py b/src/label_studio_sdk/versions/__init__.py index e3626239c..64e73bb6c 100644 --- a/src/label_studio_sdk/versions/__init__.py +++ b/src/label_studio_sdk/versions/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import VersionsGetResponse, VersionsGetResponseEdition __all__ = ["VersionsGetResponse", "VersionsGetResponseEdition"] diff --git a/src/label_studio_sdk/versions/client.py b/src/label_studio_sdk/versions/client.py index f9d9df035..4584d7665 100644 --- a/src/label_studio_sdk/versions/client.py +++ b/src/label_studio_sdk/versions/client.py @@ -1,18 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.client_wrapper import SyncClientWrapper import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions +from .raw_client import AsyncRawVersionsClient, RawVersionsClient from .types.versions_get_response import VersionsGetResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper class VersionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawVersionsClient + """ + return self._raw_client def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> VersionsGetResponse: """ @@ -37,29 +46,24 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ver ) client.versions.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/version", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - VersionsGetResponse, - parse_obj_as( - type_=VersionsGetResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(request_options=request_options) + return _response.data class AsyncVersionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawVersionsClient + """ + return self._raw_client async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> VersionsGetResponse: """ @@ -92,21 +96,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/version", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - VersionsGetResponse, - parse_obj_as( - type_=VersionsGetResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/versions/raw_client.py b/src/label_studio_sdk/versions/raw_client.py new file mode 100644 index 000000000..4ad3ed634 --- /dev/null +++ b/src/label_studio_sdk/versions/raw_client.py @@ -0,0 +1,91 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from .types.versions_get_response import VersionsGetResponse + + +class RawVersionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[VersionsGetResponse]: + """ + Get version information about the Label Studio instance. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[VersionsGetResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/version", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + VersionsGetResponse, + parse_obj_as( + type_=VersionsGetResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawVersionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[VersionsGetResponse]: + """ + Get version information about the Label Studio instance. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[VersionsGetResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/version", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + VersionsGetResponse, + parse_obj_as( + type_=VersionsGetResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/versions/types/__init__.py b/src/label_studio_sdk/versions/types/__init__.py index 852040b2d..a4dddd9ad 100644 --- a/src/label_studio_sdk/versions/types/__init__.py +++ b/src/label_studio_sdk/versions/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .versions_get_response import VersionsGetResponse from .versions_get_response_edition import VersionsGetResponseEdition diff --git a/src/label_studio_sdk/versions/types/versions_get_response.py b/src/label_studio_sdk/versions/types/versions_get_response.py index 5e25f1e81..3ef46f094 100644 --- a/src/label_studio_sdk/versions/types/versions_get_response.py +++ b/src/label_studio_sdk/versions/types/versions_get_response.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata from .versions_get_response_edition import VersionsGetResponseEdition -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class VersionsGetResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/__init__.py b/src/label_studio_sdk/views/__init__.py index 498ccadb3..9fc67fadc 100644 --- a/src/label_studio_sdk/views/__init__.py +++ b/src/label_studio_sdk/views/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( ViewsCreateRequestData, ViewsCreateRequestDataFilters, diff --git a/src/label_studio_sdk/views/client.py b/src/label_studio_sdk/views/client.py index a544068de..662ad680d 100644 --- a/src/label_studio_sdk/views/client.py +++ b/src/label_studio_sdk/views/client.py @@ -1,17 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.view import View -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from .raw_client import AsyncRawViewsClient, RawViewsClient from .types.views_create_request_data import ViewsCreateRequestData -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.jsonable_encoder import jsonable_encoder from .types.views_update_request_data import ViewsUpdateRequestData -from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -19,7 +15,18 @@ class ViewsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawViewsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawViewsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawViewsClient + """ + return self._raw_client def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -52,27 +59,8 @@ def list( ) client.views.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[View], - parse_obj_as( - type_=typing.List[View], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -112,34 +100,8 @@ def create( ) client.views.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="POST", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsCreateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - 
raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create(data=data, project=project, request_options=request_options) + return _response.data def delete_all(self, *, project: int, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -170,25 +132,8 @@ def delete_all(self, *, project: int, request_options: typing.Optional[RequestOp project=1, ) """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/reset/", - method="DELETE", - json={ - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete_all(project=project, request_options=request_options) + return _response.data def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ @@ -219,24 +164,8 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -265,18 +194,8 @@ 
def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -320,39 +239,24 @@ def update( id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsUpdateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data class AsyncViewsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawViewsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawViewsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawViewsClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None @@ -393,27 +297,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[View], - parse_obj_as( - type_=typing.List[View], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -461,34 +346,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="POST", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsCreateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create(data=data, project=project, request_options=request_options) + return _response.data async def delete_all(self, *, project: int, request_options: 
typing.Optional[RequestOptions] = None) -> None: """ @@ -527,25 +386,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/reset/", - method="DELETE", - json={ - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete_all(project=project, request_options=request_options) + return _response.data async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ @@ -584,24 +426,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -638,18 +464,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - 
except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -701,31 +517,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsUpdateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/views/raw_client.py b/src/label_studio_sdk/views/raw_client.py new file mode 100644 index 000000000..a778be49f --- /dev/null +++ b/src/label_studio_sdk/views/raw_client.py @@ -0,0 +1,574 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.view import View +from .types.views_create_request_data import ViewsCreateRequestData +from .types.views_update_request_data import ViewsUpdateRequestData + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawViewsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[View]]: + """ + + List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[View]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + data: typing.Optional[ViewsCreateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[View]: + """ + + Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[ViewsCreateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[View] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="POST", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete_all( + self, *, project: int, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/reset/", + method="DELETE", + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[View]: + """ + + Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[View] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a specific Data Manager view (tab) by ID. You can find the view using [List views](list). 
+ + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: str, + *, + data: typing.Optional[ViewsUpdateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[View]: + """ + + You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + data : typing.Optional[ViewsUpdateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[View] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawViewsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[View]]: + """ + + List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[View]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + data: typing.Optional[ViewsCreateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[View]: + """ + + Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[ViewsCreateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[View] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="POST", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete_all( + self, *, project: int, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/reset/", + method="DELETE", + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[View]: + """ + + Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[View] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a specific Data Manager view (tab) by ID. 
You can find the view using [List views](list). + + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: str, + *, + data: typing.Optional[ViewsUpdateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[View]: + """ + + You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + data : typing.Optional[ViewsUpdateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[View] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/views/types/__init__.py b/src/label_studio_sdk/views/types/__init__.py index 56164fc06..0404fff5e 100644 --- a/src/label_studio_sdk/views/types/__init__.py +++ b/src/label_studio_sdk/views/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .views_create_request_data import ViewsCreateRequestData from .views_create_request_data_filters import ViewsCreateRequestDataFilters from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction diff --git a/src/label_studio_sdk/views/types/views_create_request_data.py b/src/label_studio_sdk/views/types/views_create_request_data.py index e5a689a01..e7020d25a 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data.py +++ b/src/label_studio_sdk/views/types/views_create_request_data.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from .views_create_request_data_filters import ViewsCreateRequestDataFilters + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_create_request_data_filters import ViewsCreateRequestDataFilters from .views_create_request_data_ordering_item import ViewsCreateRequestDataOrderingItem -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsCreateRequestData(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters.py b/src/label_studio_sdk/views/types/views_create_request_data_filters.py index db91d708f..f0ea6bfa6 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction -import pydantic import typing + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction from .views_create_request_data_filters_items_item import ViewsCreateRequestDataFiltersItemsItem -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsCreateRequestDataFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py index 2bbf8022f..a7280b1b3 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel -from .views_create_request_data_filters_items_item_filter import ViewsCreateRequestDataFiltersItemsItemFilter +import typing + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_create_request_data_filters_items_item_filter import ViewsCreateRequestDataFiltersItemsItemFilter from .views_create_request_data_filters_items_item_operator import ViewsCreateRequestDataFiltersItemsItemOperator from .views_create_request_data_filters_items_item_value import ViewsCreateRequestDataFiltersItemsItemValue -from ...core.pydantic_utilities import IS_PYDANTIC_V2 -import typing class ViewsCreateRequestDataFiltersItemsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data.py b/src/label_studio_sdk/views/types/views_update_request_data.py index 1a0ceda3c..0b44365a7 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data.py +++ b/src/label_studio_sdk/views/types/views_update_request_data.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from .views_update_request_data_filters import ViewsUpdateRequestDataFilters + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_update_request_data_filters import ViewsUpdateRequestDataFilters from .views_update_request_data_ordering_item import ViewsUpdateRequestDataOrderingItem -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsUpdateRequestData(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters.py b/src/label_studio_sdk/views/types/views_update_request_data_filters.py index 5d35fe732..f4fc71c12 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .views_update_request_data_filters_conjunction import ViewsUpdateRequestDataFiltersConjunction -import pydantic import typing + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_update_request_data_filters_conjunction import ViewsUpdateRequestDataFiltersConjunction from .views_update_request_data_filters_items_item import ViewsUpdateRequestDataFiltersItemsItem -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsUpdateRequestDataFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py index 88f2e1dcb..bbf3aeab9 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel -from .views_update_request_data_filters_items_item_filter import ViewsUpdateRequestDataFiltersItemsItemFilter +import typing + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_update_request_data_filters_items_item_filter import ViewsUpdateRequestDataFiltersItemsItemFilter from .views_update_request_data_filters_items_item_operator import ViewsUpdateRequestDataFiltersItemsItemOperator from .views_update_request_data_filters_items_item_value import ViewsUpdateRequestDataFiltersItemsItemValue -from ...core.pydantic_utilities import IS_PYDANTIC_V2 -import typing class ViewsUpdateRequestDataFiltersItemsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/webhooks/__init__.py b/src/label_studio_sdk/webhooks/__init__.py index 338fdac42..2a2583ff5 100644 --- a/src/label_studio_sdk/webhooks/__init__.py +++ b/src/label_studio_sdk/webhooks/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import WebhooksUpdateRequestActionsItem __all__ = ["WebhooksUpdateRequestActionsItem"] diff --git a/src/label_studio_sdk/webhooks/client.py b/src/label_studio_sdk/webhooks/client.py index 12359cfc6..c752a19ad 100644 --- a/src/label_studio_sdk/webhooks/client.py +++ b/src/label_studio_sdk/webhooks/client.py @@ -1,19 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.webhook import Webhook -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError from ..types.webhook_actions_item import WebhookActionsItem -import datetime as dt -from ..core.jsonable_encoder import jsonable_encoder -from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem -from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate -from ..core.client_wrapper import AsyncClientWrapper +from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +from .raw_client import AsyncRawWebhooksClient, RawWebhooksClient +from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -21,7 +18,18 @@ class WebhooksClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawWebhooksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawWebhooksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawWebhooksClient + """ + return self._raw_client def list( self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None @@ -56,27 +64,8 @@ def list( ) client.webhooks.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Webhook], - parse_obj_as( - type_=typing.List[Webhook], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -153,38 +142,21 @@ def create( url="url", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="POST", - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = self._raw_client.create( + url=url, + id=id, + organization=organization, + project=project, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def info( self, @@ -217,21 +189,8 @@ def info( ) client.webhooks.info() """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/info/", - method="GET", - params={ - "organization-only": organization_only, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.info(organization_only=organization_only, request_options=request_options) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ @@ -264,24 +223,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -313,18 +256,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="DELETE", - 
request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -431,51 +364,44 @@ def update( webhook_serializer_for_update_url="url", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id_)}/", - method="PATCH", - params={ - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - }, - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = self._raw_client.update( + id_, + url=url, + webhook_serializer_for_update_url=webhook_serializer_for_update_url, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + id=id, + organization=organization, + project=project, + webhook_serializer_for_update_send_payload=webhook_serializer_for_update_send_payload, + webhook_serializer_for_update_send_for_all_actions=webhook_serializer_for_update_send_for_all_actions, + webhook_serializer_for_update_headers=webhook_serializer_for_update_headers, + webhook_serializer_for_update_is_active=webhook_serializer_for_update_is_active, + webhook_serializer_for_update_actions=webhook_serializer_for_update_actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: 
- return typing.cast( - WebhookSerializerForUpdate, - parse_obj_as( - type_=WebhookSerializerForUpdate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncWebhooksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawWebhooksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawWebhooksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawWebhooksClient + """ + return self._raw_client async def list( self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None @@ -518,27 +444,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Webhook], - parse_obj_as( - type_=typing.List[Webhook], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -623,38 +530,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="POST", - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - 
"send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = await self._raw_client.create( + url=url, + id=id, + organization=organization, + project=project, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def info( self, @@ -695,21 +585,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/info/", - method="GET", - params={ - "organization-only": organization_only, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.info(organization_only=organization_only, request_options=request_options) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ @@ -750,24 +627,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 
<= _response.status_code < 300: - return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -807,18 +668,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -933,43 +784,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id_)}/", - method="PATCH", - params={ - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - }, - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = await self._raw_client.update( + id_, + url=url, + 
webhook_serializer_for_update_url=webhook_serializer_for_update_url, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + id=id, + organization=organization, + project=project, + webhook_serializer_for_update_send_payload=webhook_serializer_for_update_send_payload, + webhook_serializer_for_update_send_for_all_actions=webhook_serializer_for_update_send_for_all_actions, + webhook_serializer_for_update_headers=webhook_serializer_for_update_headers, + webhook_serializer_for_update_is_active=webhook_serializer_for_update_is_active, + webhook_serializer_for_update_actions=webhook_serializer_for_update_actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - WebhookSerializerForUpdate, - parse_obj_as( - type_=WebhookSerializerForUpdate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/webhooks/raw_client.py b/src/label_studio_sdk/webhooks/raw_client.py new file mode 100644 index 000000000..0953cf927 --- /dev/null +++ b/src/label_studio_sdk/webhooks/raw_client.py @@ -0,0 +1,824 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.webhook import Webhook +from ..types.webhook_actions_item import WebhookActionsItem +from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate +from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawWebhooksClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[Webhook]]: + """ + + List all webhooks set up for your organization. + + Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. + + For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). + + Parameters + ---------- + project : typing.Optional[str] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[Webhook]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + url: str, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + send_payload: typing.Optional[bool] = OMIT, + send_for_all_actions: typing.Optional[bool] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + is_active: typing.Optional[bool] = OMIT, + actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Webhook]: + """ + + Create a webhook. + Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). + + If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). + + Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. 
If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. + + Parameters + ---------- + url : str + URL of webhook + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Sequence[WebhookActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Webhook] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + 
def info( + self, + *, + organization_only: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + organization_only : typing.Optional[bool] + organization-only or not + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/info/", + method="GET", + params={ + "organization-only": organization_only, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Webhook]: + """ + + Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Webhook] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id_: int, + *, + url: str, + webhook_serializer_for_update_url: str, + send_payload: typing.Optional[bool] = None, + send_for_all_actions: typing.Optional[bool] = None, + headers: typing.Optional[str] = None, + is_active: typing.Optional[bool] = None, + actions: typing.Optional[ + typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] + ] = None, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_actions: typing.Optional[ + typing.Sequence[WebhookSerializerForUpdateActionsItem] + ] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[WebhookSerializerForUpdate]: + """ + + Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). 
+ + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id_ : int + A unique integer value identifying this webhook. + + url : str + URL of webhook + + webhook_serializer_for_update_url : str + URL of webhook + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[str] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + webhook_serializer_for_update_send_payload : typing.Optional[bool] + If value is False send only action + + webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + webhook_serializer_for_update_is_active : typing.Optional[bool] + If value is False the webhook is disabled + + webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[WebhookSerializerForUpdate] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id_)}/", + method="PATCH", + params={ + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + }, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawWebhooksClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Webhook]]: + """ + + List all webhooks set up for your organization. + + Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. 
+ + For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). + + Parameters + ---------- + project : typing.Optional[str] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[Webhook]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + url: str, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + send_payload: typing.Optional[bool] = OMIT, + send_for_all_actions: typing.Optional[bool] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + is_active: typing.Optional[bool] = OMIT, + actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Webhook]: + """ + + Create a webhook. + Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). 
+ + If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). + + Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. + + Parameters + ---------- + url : str + URL of webhook + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Sequence[WebhookActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Webhook] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def info( + self, + *, + organization_only: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + organization_only : typing.Optional[bool] + organization-only or not + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/info/", + method="GET", + params={ + "organization-only": organization_only, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Webhook]: + """ + + Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Webhook] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id_: int, + *, + url: str, + webhook_serializer_for_update_url: str, + send_payload: typing.Optional[bool] = None, + send_for_all_actions: typing.Optional[bool] = None, + headers: typing.Optional[str] = None, + is_active: typing.Optional[bool] = None, + actions: typing.Optional[ + typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] + ] = None, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_actions: typing.Optional[ + typing.Sequence[WebhookSerializerForUpdateActionsItem] + ] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[WebhookSerializerForUpdate]: + """ + + Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). 
+ + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id_ : int + A unique integer value identifying this webhook. + + url : str + URL of webhook + + webhook_serializer_for_update_url : str + URL of webhook + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[str] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + webhook_serializer_for_update_send_payload : typing.Optional[bool] + If value is False send only action + + webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + webhook_serializer_for_update_is_active : typing.Optional[bool] + If value is False the webhook is disabled + + webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[WebhookSerializerForUpdate] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id_)}/", + method="PATCH", + params={ + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + }, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/webhooks/types/__init__.py b/src/label_studio_sdk/webhooks/types/__init__.py index 5c47f8599..2acf204b9 100644 --- a/src/label_studio_sdk/webhooks/types/__init__.py +++ b/src/label_studio_sdk/webhooks/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem __all__ = ["WebhooksUpdateRequestActionsItem"] diff --git a/src/label_studio_sdk/workspaces/__init__.py b/src/label_studio_sdk/workspaces/__init__.py index ddc7fa13c..795aaf483 100644 --- a/src/label_studio_sdk/workspaces/__init__.py +++ b/src/label_studio_sdk/workspaces/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from . import members from .members import MembersCreateResponse, MembersListResponseItem diff --git a/src/label_studio_sdk/workspaces/client.py b/src/label_studio_sdk/workspaces/client.py index 413ac6b3a..2de101cc5 100644 --- a/src/label_studio_sdk/workspaces/client.py +++ b/src/label_studio_sdk/workspaces/client.py @@ -1,16 +1,12 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper -from .members.client import MembersClient + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.workspace import Workspace -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper -from .members.client import AsyncMembersClient +from .members.client import AsyncMembersClient, MembersClient +from .raw_client import AsyncRawWorkspacesClient, RawWorkspacesClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,8 +14,19 @@ class WorkspacesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.members = MembersClient(client_wrapper=self._client_wrapper) + self._raw_client = RawWorkspacesClient(client_wrapper=client_wrapper) + self.members = MembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawWorkspacesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawWorkspacesClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ @@ -49,24 +56,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.workspaces.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/workspaces", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Workspace], - parse_obj_as( - type_=typing.List[Workspace], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -124,36 +115,16 @@ def create( ) client.workspaces.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/workspaces", - method="POST", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + 
is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: """ @@ -184,24 +155,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -231,18 +186,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, 
request_options=request_options) + return _response.data def update( self, @@ -302,42 +247,34 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncWorkspacesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.members = AsyncMembersClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawWorkspacesClient(client_wrapper=client_wrapper) + self.members = AsyncMembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawWorkspacesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawWorkspacesClient + """ + return self._raw_client async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ @@ -375,24 +312,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/workspaces", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Workspace], - parse_obj_as( - type_=typing.List[Workspace], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -458,36 +379,16 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/workspaces", - method="POST", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> Workspace: """ @@ -526,24 +427,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -581,18 +466,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -660,33 +535,14 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + 
description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/workspaces/members/__init__.py b/src/label_studio_sdk/workspaces/members/__init__.py index 2e3a8f37d..a527383e7 100644 --- a/src/label_studio_sdk/workspaces/members/__init__.py +++ b/src/label_studio_sdk/workspaces/members/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import MembersCreateResponse, MembersListResponseItem __all__ = ["MembersCreateResponse", "MembersListResponseItem"] diff --git a/src/label_studio_sdk/workspaces/members/client.py b/src/label_studio_sdk/workspaces/members/client.py index 22b0ec5c5..21e304759 100644 --- a/src/label_studio_sdk/workspaces/members/client.py +++ b/src/label_studio_sdk/workspaces/members/client.py @@ -1,15 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions -from .types.members_list_response_item import MembersListResponseItem -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawMembersClient, RawMembersClient from .types.members_create_response import MembersCreateResponse -from ...core.client_wrapper import AsyncClientWrapper +from .types.members_list_response_item import MembersListResponseItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -17,7 +14,18 @@ class MembersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawMembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawMembersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawMembersClient + """ + return self._raw_client def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -50,24 +58,8 @@ def list( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MembersListResponseItem], - parse_obj_as( - type_=typing.List[MembersListResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, request_options=request_options) + return _response.data def create( self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None @@ -103,31 +95,8 @@ def create( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="POST", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MembersCreateResponse, - parse_obj_as( - type_=MembersCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create(id, user=user, request_options=request_options) + return _response.data def delete( self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None @@ -162,30 +131,24 @@ def delete( id=1, 
) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="DELETE", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, user=user, request_options=request_options) + return _response.data class AsyncMembersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawMembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawMembersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawMembersClient + """ + return self._raw_client async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -226,24 +189,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MembersListResponseItem], - parse_obj_as( - type_=typing.List[MembersListResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(id, request_options=request_options) + return _response.data async def create( self, id: int, *, user: 
typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None @@ -287,31 +234,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="POST", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MembersCreateResponse, - parse_obj_as( - type_=MembersCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create(id, user=user, request_options=request_options) + return _response.data async def delete( self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None @@ -354,22 +278,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="DELETE", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, user=user, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/workspaces/members/raw_client.py b/src/label_studio_sdk/workspaces/members/raw_client.py new file mode 100644 index 000000000..0bb64c622 --- /dev/null +++ 
b/src/label_studio_sdk/workspaces/members/raw_client.py @@ -0,0 +1,290 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from .types.members_create_response import MembersCreateResponse +from .types.members_list_response_item import MembersListResponseItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawMembersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[MembersListResponseItem]]: + """ + + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[MembersListResponseItem]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[MembersCreateResponse]: + """ + + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[MembersCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawMembersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[MembersListResponseItem]]: + """ + + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[MembersListResponseItem]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[MembersCreateResponse]: + """ + + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MembersCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/workspaces/members/types/__init__.py b/src/label_studio_sdk/workspaces/members/types/__init__.py index b6f51dbc8..b1aa23227 100644 --- a/src/label_studio_sdk/workspaces/members/types/__init__.py +++ b/src/label_studio_sdk/workspaces/members/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .members_create_response import MembersCreateResponse from .members_list_response_item import MembersListResponseItem diff --git a/src/label_studio_sdk/workspaces/members/types/members_create_response.py b/src/label_studio_sdk/workspaces/members/types/members_create_response.py index 13ed6d9ba..38c11f695 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_create_response.py +++ b/src/label_studio_sdk/workspaces/members/types/members_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class MembersCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py index d436b704e..0428f3e9a 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py +++ b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class MembersListResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/workspaces/raw_client.py b/src/label_studio_sdk/workspaces/raw_client.py new file mode 100644 index 000000000..e3dfdb351 --- /dev/null +++ b/src/label_studio_sdk/workspaces/raw_client.py @@ -0,0 +1,561 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.workspace import Workspace + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawWorkspacesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[Workspace]]: + """ + + List all workspaces for your organization. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Workspace]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Workspace]: + """ + + Create a new workspace. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. 
This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Workspace] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Workspace]: + """ + + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). 
+ + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Workspace] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Workspace]: + """ + + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Workspace] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawWorkspacesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Workspace]]: + """ + + List all workspaces for your organization. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Workspace]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Workspace]: + """ + + Create a new workspace. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Workspace] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Workspace]: + """ + + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Workspace] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Workspace]: + """ + + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Workspace] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index 595f0074c..000000000 --- a/tests/conftest.py +++ /dev/null @@ -1,18 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -import os -import pytest -from label_studio_sdk import AsyncLabelStudio - - -@pytest.fixture -def client() -> LabelStudio: - return LabelStudio(api_key=os.getenv("ENV_API_KEY", "api_key"), base_url=os.getenv("TESTS_BASE_URL", "base_url")) - - -@pytest.fixture -def async_client() -> AsyncLabelStudio: - return AsyncLabelStudio( - api_key=os.getenv("ENV_API_KEY", "api_key"), base_url=os.getenv("TESTS_BASE_URL", "base_url") - ) diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py index 73f811f5e..ab04ce639 100644 --- a/tests/custom/test_client.py +++ b/tests/custom/test_client.py @@ -4,4 +4,4 @@ # Get started with writing tests with pytest at https://docs.pytest.org @pytest.mark.skip(reason="Unimplemented") def test_client() -> None: - assert True == True + assert True diff --git a/tests/export_storage/__init__.py b/tests/export_storage/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/export_storage/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/export_storage/test_azure.py b/tests/export_storage/test_azure.py deleted file mode 100644 index 0b645b804..000000000 --- a/tests/export_storage/test_azure.py +++ /dev/null @@ -1,251 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - }, - ) - response = client.export_storage.azure.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", 
- } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.export_storage.azure.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - 
"created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.azure.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.export_storage.azure.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - 
"last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.azure.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_gcs.py b/tests/export_storage/test_gcs.py deleted file mode 100644 index 7c6f7a22b..000000000 --- a/tests/export_storage/test_gcs.py +++ /dev/null @@ -1,251 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - }, - ) - response = client.export_storage.gcs.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - 
"google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.export_storage.gcs.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", 
- "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.gcs.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.export_storage.gcs.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "bucket": "bucket", - 
"prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.gcs.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_local.py b/tests/export_storage/test_local.py deleted file mode 100644 index f39bae76b..000000000 --- a/tests/export_storage/test_local.py +++ /dev/null @@ -1,225 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - }, - ) - response = client.export_storage.local.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.export_storage.local.create() - 
validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.local.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to 
avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.export_storage.local.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - 
"project": "integer", - } - response = client.export_storage.local.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_redis.py b/tests/export_storage/test_redis.py deleted file mode 100644 index 32e207ab9..000000000 --- a/tests/export_storage/test_redis.py +++ /dev/null @@ -1,261 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "db": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "db": "integer", - "project": "integer", - } - }, - ) - response = client.export_storage.redis.list() - 
validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "db": 1, - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "db": "integer", - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.export_storage.redis.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - 
"can_delete_objects": True, - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "db": "integer", - "project": "integer", - } - response = client.export_storage.redis.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "db": 1, - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "db": "integer", - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.export_storage.redis.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.update(id=1) - 
validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "db": "integer", - "project": "integer", - } - response = client.export_storage.redis.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_s3.py b/tests/export_storage/test_s3.py deleted file mode 100644 index ee8909b6d..000000000 --- a/tests/export_storage/test_s3.py +++ /dev/null @@ -1,291 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - }, - ) - response = client.export_storage.s3.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, 
async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.export_storage.s3.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": 
"regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": 
"aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.export_storage.s3.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - 
"use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_s3s.py b/tests/export_storage/test_s3s.py deleted file mode 100644 index f9efabd90..000000000 --- a/tests/export_storage/test_s3s.py +++ /dev/null @@ -1,175 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - }, - ) - response = client.export_storage.s3s.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: 
typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3s.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3s.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - 
client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3s.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) diff --git a/tests/import_storage/__init__.py b/tests/import_storage/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/import_storage/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- diff --git a/tests/import_storage/test_azure.py b/tests/import_storage/test_azure.py deleted file mode 100644 index da52ad2c2..000000000 --- a/tests/import_storage/test_azure.py +++ /dev/null @@ -1,269 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - }, - ) - response = client.import_storage.azure.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.import_storage.azure.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - 
} - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.azure.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.import_storage.azure.update(id=1) - validate_response(response, expected_response, 
expected_types) - - async_response = await async_client.import_storage.azure.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.azure.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_gcs.py b/tests/import_storage/test_gcs.py deleted file mode 100644 index e44d38d91..000000000 --- a/tests/import_storage/test_gcs.py +++ /dev/null @@ -1,269 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - }, - ) - response = client.import_storage.gcs.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - 
"description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.import_storage.gcs.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": 
None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.gcs.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.import_storage.gcs.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = 
await async_client.import_storage.gcs.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.gcs.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_local.py b/tests/import_storage/test_local.py deleted file mode 100644 index 75f0bff65..000000000 --- a/tests/import_storage/test_local.py +++ /dev/null @@ -1,219 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "project": "integer", - } - }, - ) - response = client.import_storage.local.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.local.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.import_storage.local.create() - validate_response(response, expected_response, expected_types) - - 
async_response = await async_client.import_storage.local.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "project": "integer", - } - response = client.import_storage.local.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.local.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.local.delete(id=1) 
# type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.local.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.import_storage.local.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.local.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "project": "integer", - } - response = client.import_storage.local.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await 
async_client.import_storage.local.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_redis.py b/tests/import_storage/test_redis.py deleted file mode 100644 index 01cd0696e..000000000 --- a/tests/import_storage/test_redis.py +++ /dev/null @@ -1,255 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "db": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "db": "integer", - "project": "integer", - } - }, - ) - response = client.import_storage.redis.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.list() - validate_response(async_response, expected_response, expected_types) - - -async def 
test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.import_storage.redis.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": 
None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "db": "integer", - "project": "integer", - } - response = client.import_storage.redis.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.import_storage.redis.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": 
"host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "db": "integer", - "project": "integer", - } - response = client.import_storage.redis.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_s3.py b/tests/import_storage/test_s3.py deleted file mode 100644 index 3b706703d..000000000 --- a/tests/import_storage/test_s3.py +++ /dev/null @@ -1,319 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - }, - ) - response = client.import_storage.s3.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.list() - validate_response(async_response, 
expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "recursive_scan": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "recursive_scan": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.import_storage.s3.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": 
"last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: 
AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "recursive_scan": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "recursive_scan": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.import_storage.s3.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - 
"region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_s3s.py b/tests/import_storage/test_s3s.py deleted file mode 100644 index 96ba46e7a..000000000 --- a/tests/import_storage/test_s3s.py +++ /dev/null @@ -1,329 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - "role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - }, - ) - response = client.import_storage.s3s.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": 
"2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - "role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": 
"external_id", - "role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - 
"role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - "role_arn": 
"role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/projects/__init__.py b/tests/projects/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/projects/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/projects/test_exports.py b/tests/projects/test_exports.py deleted file mode 100644 index 42d2ad13f..000000000 --- a/tests/projects/test_exports.py +++ /dev/null @@ -1,205 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_formats(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "name": "JSON", - "title": "title", - "description": "description", - "link": "link", - "tags": ["tags"], - "disabled": True, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "name": None, - "title": None, - "description": None, - "link": None, - "tags": ("list", {0: None}), - "disabled": None, - } - }, - ) - response = client.projects.exports.list_formats(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.list_formats(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "title": "title", - "id": 1, - "created_by": { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "email": "email", - "avatar": "avatar", - }, - "created_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "status": "created", - "md5": "md5", - "counters": {"key": "value"}, - "converted_formats": [{"export_type": "export_type"}], - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "title": None, - "id": "integer", - "created_by": {"id": "integer", "first_name": None, "last_name": None, "email": None, "avatar": None}, - "created_at": "datetime", - "finished_at": "datetime", - "status": None, - "md5": None, - "counters": ("dict", {0: (None, None)}), - "converted_formats": ("list", {0: {"export_type": None}}), - } - }, - ) - response = client.projects.exports.list(project_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await 
async_client.projects.exports.list(project_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "id": 1, - "created_by": { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "email": "email", - "avatar": "avatar", - }, - "created_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "status": "created", - "md5": "md5", - "counters": {"key": "value"}, - "converted_formats": [{"id": 1, "status": "created", "export_type": "export_type", "traceback": "traceback"}], - "task_filter_options": { - "view": 1, - "skipped": "skipped", - "finished": "finished", - "annotated": "annotated", - "only_with_annotations": True, - }, - "annotation_filter_options": {"usual": True, "ground_truth": True, "skipped": True}, - "serialization_options": { - "drafts": {"only_id": True}, - "predictions": {"only_id": True}, - "include_annotation_history": True, - "annotations__completed_by": {"only_id": True}, - "interpolate_key_frames": True, - }, - } - expected_types: typing.Any = { - "title": None, - "id": "integer", - "created_by": {"id": "integer", "first_name": None, "last_name": None, "email": None, "avatar": None}, - "created_at": "datetime", - "finished_at": "datetime", - "status": None, - "md5": None, - "counters": ("dict", {0: (None, None)}), - "converted_formats": ("list", {0: {"id": "integer", "status": None, "export_type": None, "traceback": None}}), - "task_filter_options": { - "view": "integer", - "skipped": None, - "finished": None, - "annotated": None, - "only_with_annotations": None, - }, - "annotation_filter_options": {"usual": None, "ground_truth": None, "skipped": None}, - "serialization_options": { - "drafts": {"only_id": None}, - "predictions": {"only_id": None}, - "include_annotation_history": None, - "annotations__completed_by": {"only_id": None}, - 
"interpolate_key_frames": None, - }, - } - response = client.projects.exports.create(project_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.create(project_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "id": 1, - "created_by": { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "email": "email", - "avatar": "avatar", - }, - "created_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "status": "created", - "md5": "md5", - "counters": {"key": "value"}, - "converted_formats": [{"id": 1, "status": "created", "export_type": "export_type", "traceback": "traceback"}], - } - expected_types: typing.Any = { - "title": None, - "id": "integer", - "created_by": {"id": "integer", "first_name": None, "last_name": None, "email": None, "avatar": None}, - "created_at": "datetime", - "finished_at": "datetime", - "status": None, - "md5": None, - "counters": ("dict", {0: (None, None)}), - "converted_formats": ("list", {0: {"id": "integer", "status": None, "export_type": None, "traceback": None}}), - } - response = client.projects.exports.get(project_id=1, export_pk="export_pk") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.get(project_id=1, export_pk="export_pk") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.projects.exports.delete(project_id=1, export_pk="export_pk") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.projects.exports.delete(project_id=1, 
export_pk="export_pk") # type: ignore[func-returns-value] - is None - ) - - -async def test_convert(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"export_type": "JSON", "converted_format": 1} - expected_types: typing.Any = {"export_type": None, "converted_format": "integer"} - response = client.projects.exports.convert(project_id=1, export_pk="export_pk") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.convert(project_id=1, export_pk="export_pk") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/projects/test_pauses.py b/tests/projects/test_pauses.py deleted file mode 100644 index 5bf4ad0c3..000000000 --- a/tests/projects/test_pauses.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - }, - ) - response = client.projects.pauses.list(project_pk=1, user_pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.pauses.list(project_pk=1, 
user_pk=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - response = client.projects.pauses.create(project_pk=1, user_pk=1, reason="reason") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.pauses.create(project_pk=1, user_pk=1, reason="reason") - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - response = client.projects.pauses.get(project_pk=1, user_pk=1, id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.pauses.get(project_pk=1, user_pk=1, id=1) - 
validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.projects.pauses.delete(project_pk=1, user_pk=1, id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.projects.pauses.delete(project_pk=1, user_pk=1, id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - response = client.projects.pauses.update(project_pk=1, user_pk=1, id=1, reason="reason") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.pauses.update(project_pk=1, user_pk=1, id=1, reason="reason") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/__init__.py b/tests/prompts/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/prompts/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- diff --git a/tests/prompts/test_indicators.py b/tests/prompts/test_indicators.py deleted file mode 100644 index 8167c72e1..000000000 --- a/tests/prompts/test_indicators.py +++ /dev/null @@ -1,47 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "key": "key", - "title": "title", - "main_kpi": "main_kpi", - "secondary_kpi": "secondary_kpi", - "additional_kpis": [{}], - "extra_kpis": [{}], - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "key": None, - "title": None, - "main_kpi": None, - "secondary_kpi": None, - "additional_kpis": ("list", {0: {}}), - "extra_kpis": ("list", {0: {}}), - } - }, - ) - response = client.prompts.indicators.list(pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.indicators.list(pk=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"title": "title", "values": {"key": "value"}} - expected_types: typing.Any = {"title": None, "values": ("dict", {0: (None, None)})} - response = client.prompts.indicators.get(indicator_key="indicator_key", pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.indicators.get(indicator_key="indicator_key", pk=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/test_runs.py b/tests/prompts/test_runs.py deleted file mode 100644 index a3fe40ce5..000000000 --- a/tests/prompts/test_runs.py +++ /dev/null @@ -1,74 +0,0 @@ -# This file was auto-generated by Fern 
from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "organization": 1, - "project": 1, - "model_version": 1, - "created_by": 1, - "project_subset": "All", - "status": "Pending", - "job_id": "job_id", - "created_at": "2024-01-15T09:30:00Z", - "triggered_at": "2024-01-15T09:30:00Z", - "predictions_updated_at": "2024-01-15T09:30:00Z", - "completed_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "organization": "integer", - "project": "integer", - "model_version": "integer", - "created_by": "integer", - "project_subset": None, - "status": None, - "job_id": None, - "created_at": "datetime", - "triggered_at": "datetime", - "predictions_updated_at": "datetime", - "completed_at": "datetime", - } - response = client.prompts.runs.list(id=1, version_id=1, project=1, project_subset="All") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.runs.list(id=1, version_id=1, project=1, project_subset="All") - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "organization": 1, - "project": 1, - "model_version": 1, - "created_by": 1, - "project_subset": "All", - "status": "Pending", - "job_id": "job_id", - "created_at": "2024-01-15T09:30:00Z", - "triggered_at": "2024-01-15T09:30:00Z", - "predictions_updated_at": "2024-01-15T09:30:00Z", - "completed_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "organization": "integer", - "project": "integer", - "model_version": "integer", - "created_by": "integer", - "project_subset": None, - "status": None, - "job_id": None, - "created_at": "datetime", - 
"triggered_at": "datetime", - "predictions_updated_at": "datetime", - "completed_at": "datetime", - } - response = client.prompts.runs.create(id=1, version_id=1, project=1, project_subset="All") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.runs.create(id=1, version_id=1, project=1, project_subset="All") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/test_versions.py b/tests/prompts/test_versions.py deleted file mode 100644 index 584a55e84..000000000 --- a/tests/prompts/test_versions.py +++ /dev/null @@ -1,280 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - }, - ) - response = client.prompts.versions.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: 
typing.Any = { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - expected_types: typing.Any = { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - response = client.prompts.versions.create(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.create(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - expected_types: typing.Any = { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - response = client.prompts.versions.get(id=1, version_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.get(id=1, version_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the 
function not being meant to return a value - assert ( - client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - expected_types: typing.Any = { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - response = client.prompts.versions.update(id=1, version_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.update(id=1, version_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_cost_estimate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "prompt_cost_usd": "prompt_cost_usd", - "completion_cost_usd": "completion_cost_usd", - "total_cost_usd": "total_cost_usd", - "is_error": True, - "error_type": "error_type", - "error_message": "error_message", - } - expected_types: typing.Any = { - "prompt_cost_usd": None, - "completion_cost_usd": None, - "total_cost_usd": None, - "is_error": None, - "error_type": None, - "error_message": None, - } - response = client.prompts.versions.cost_estimate(prompt_id=1, version_id=1, project_id=1, project_subset=1) - validate_response(response, expected_response, expected_types) - - 
async_response = await async_client.prompts.versions.cost_estimate( - prompt_id=1, version_id=1, project_id=1, project_subset=1 - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get_refined_prompt(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "reasoning": "reasoning", - "prompt": "prompt", - "refinement_job_id": "refinement_job_id", - "refinement_status": "Pending", - "total_cost": "total_cost", - "previous_version": { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - }, - } - expected_types: typing.Any = { - "title": None, - "reasoning": None, - "prompt": None, - "refinement_job_id": None, - "refinement_status": None, - "total_cost": None, - "previous_version": { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - }, - } - response = client.prompts.versions.get_refined_prompt( - prompt_id=1, version_id=1, refinement_job_id="refinement_job_id" - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.get_refined_prompt( - prompt_id=1, version_id=1, refinement_job_id="refinement_job_id" - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_refine_prompt(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "reasoning": "reasoning", - "prompt": "prompt", - "refinement_job_id": "refinement_job_id", - "refinement_status": "Pending", - 
"total_cost": "total_cost", - "previous_version": { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - }, - } - expected_types: typing.Any = { - "title": None, - "reasoning": None, - "prompt": None, - "refinement_job_id": None, - "refinement_status": None, - "total_cost": None, - "previous_version": { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - }, - } - response = client.prompts.versions.refine_prompt(prompt_id=1, version_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.refine_prompt(prompt_id=1, version_id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_actions.py b/tests/test_actions.py deleted file mode 100644 index 5d0be4480..000000000 --- a/tests/test_actions.py +++ /dev/null @@ -1,59 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -from label_studio_sdk.actions import ActionsCreateRequestFilters -from label_studio_sdk.actions import ActionsCreateRequestFiltersItemsItem -from label_studio_sdk.actions import ActionsCreateRequestSelectedItemsExcluded - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.actions.list() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.actions.list() # type: ignore[func-returns-value] - is None - ) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.actions.create( - id="retrieve_tasks_predictions", - project=1, - filters=ActionsCreateRequestFilters( - conjunction="or", - items=[ - ActionsCreateRequestFiltersItemsItem( - filter="filter:tasks:id", operator="greater", type="Number", value=123 - ) - ], - ), - selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), - ordering=["tasks:total_annotations"], - ) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.actions.create( - id="retrieve_tasks_predictions", - project=1, - filters=ActionsCreateRequestFilters( - conjunction="or", - items=[ - ActionsCreateRequestFiltersItemsItem( - filter="filter:tasks:id", operator="greater", type="Number", value=123 - ) - ], - ), - selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), - ordering=["tasks:total_annotations"], - ) # type: ignore[func-returns-value] - is None - ) diff --git a/tests/test_annotations.py b/tests/test_annotations.py deleted file mode 100644 index 1b8cd9161..000000000 --- a/tests/test_annotations.py +++ /dev/null @@ -1,464 +0,0 @@ 
-# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": "prediction", - "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - response = client.annotations.get(id=1) - validate_response(response, expected_response, expected_types) 
- - async_response = await async_client.annotations.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.annotations.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.annotations.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": "prediction", - "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": 
"integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - response = client.annotations.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": "prediction", - 
"task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - }, - ) - response = client.annotations.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": "prediction", - "task": 1, - 
"project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - response = client.annotations.create( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.create( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_create_bulk(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"id": 1}] - expected_types: 
typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"id": "integer"}}) - response = client.annotations.create_bulk() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.create_bulk() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_comments.py b/tests/test_comments.py deleted file mode 100644 index 509abf4ee..000000000 --- a/tests/test_comments.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": "2024-01-15T09:30:00Z", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - }, - ) - response = client.comments.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": 
"2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - response = client.comments.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - response = client.comments.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.comments.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.comments.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, 
- "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - response = client.comments.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.update(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_export_storage.py b/tests/test_export_storage.py deleted file mode 100644 index 65d22af47..000000000 --- a/tests/test_export_storage.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"name": "name", "title": "title"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"name": None, "title": None}}) - response = client.export_storage.list_types() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.list_types() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_files.py b/tests/test_files.py deleted file mode 100644 index 5637a8a6b..000000000 --- a/tests/test_files.py +++ /dev/null @@ -1,75 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"id": 1, "file": "file"} - expected_types: typing.Any = {"id": "integer", "file": None} - response = client.files.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.files.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.files.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.files.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"id": 1, "file": "file"} - expected_types: typing.Any = {"id": "integer", "file": None} - response = client.files.update(id_=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.files.update(id_=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"id": 1, "file": "file"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"id": "integer", "file": None}}) - response = client.files.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.files.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete_many(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type 
ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.files.delete_many(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.files.delete_many(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_download(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.files.download(filename="filename") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.files.download(filename="filename") # type: ignore[func-returns-value] - is None - ) diff --git a/tests/test_import_storage.py b/tests/test_import_storage.py deleted file mode 100644 index 00b2a428b..000000000 --- a/tests/test_import_storage.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"name": "name", "title": "title"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"name": None, "title": None}}) - response = client.import_storage.list_types() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.list_types() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_jwt_settings.py b/tests/test_jwt_settings.py deleted file mode 100644 index e17aa3d14..000000000 --- a/tests/test_jwt_settings.py +++ /dev/null @@ -1,44 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "api_tokens_enabled": True, - "legacy_api_tokens_enabled": True, - "api_token_ttl_days": 1, - } - expected_types: typing.Any = { - "api_tokens_enabled": None, - "legacy_api_tokens_enabled": None, - "api_token_ttl_days": "integer", - } - response = client.jwt_settings.get() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.jwt_settings.get() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "api_tokens_enabled": True, - "legacy_api_tokens_enabled": True, - "api_token_ttl_days": 1, - } - expected_types: typing.Any = { - "api_tokens_enabled": None, - "legacy_api_tokens_enabled": None, - "api_token_ttl_days": "integer", - } - response = client.jwt_settings.create(api_tokens_enabled=True, legacy_api_tokens_enabled=True, api_token_ttl_days=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.jwt_settings.create( - api_tokens_enabled=True, legacy_api_tokens_enabled=True, api_token_ttl_days=1 - ) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_ml.py b/tests/test_ml.py deleted file mode 100644 index 0a5badc22..000000000 --- a/tests/test_ml.py +++ /dev/null @@ -1,229 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "state": "CO", - "readable_state": "readable_state", - "is_interactive": True, - "url": "url", - "error_message": "error_message", - "title": "title", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "basic_auth_pass_is_set": "basic_auth_pass_is_set", - "description": "description", - "extra_params": {"key": "value"}, - "model_version": "model_version", - "timeout": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "auto_update": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "state": None, - "readable_state": None, - "is_interactive": None, - "url": None, - "error_message": None, - "title": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "basic_auth_pass_is_set": None, - "description": None, - "extra_params": ("dict", {0: (None, None)}), - "model_version": None, - "timeout": None, - "created_at": "datetime", - "updated_at": "datetime", - "auto_update": None, - "project": "integer", - } - }, - ) - response = client.ml.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "url": "url", - "project": 1, - "is_interactive": True, - "title": "title", - "description": "description", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "extra_params": {"key": 
"value"}, - "timeout": 1, - } - expected_types: typing.Any = { - "url": None, - "project": "integer", - "is_interactive": None, - "title": None, - "description": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "extra_params": ("dict", {0: (None, None)}), - "timeout": "integer", - } - response = client.ml.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "state": "CO", - "readable_state": "readable_state", - "is_interactive": True, - "url": "url", - "error_message": "error_message", - "title": "title", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "basic_auth_pass_is_set": "basic_auth_pass_is_set", - "description": "description", - "extra_params": {"key": "value"}, - "model_version": "model_version", - "timeout": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "auto_update": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "state": None, - "readable_state": None, - "is_interactive": None, - "url": None, - "error_message": None, - "title": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "basic_auth_pass_is_set": None, - "description": None, - "extra_params": ("dict", {0: (None, None)}), - "model_version": None, - "timeout": None, - "created_at": "datetime", - "updated_at": "datetime", - "auto_update": None, - "project": "integer", - } - response = client.ml.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "url": "url", - "project": 1, - "is_interactive": True, - "title": "title", - "description": "description", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "extra_params": {"key": "value"}, - "timeout": 1, - } - expected_types: typing.Any = { - "url": None, - "project": "integer", - "is_interactive": None, - "title": None, - "description": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "extra_params": ("dict", {0: (None, None)}), - "timeout": "integer", - } - response = client.ml.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_predict_interactive(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_train(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.train(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.train(id=1) # type: 
ignore[func-returns-value] - is None - ) - - -async def test_list_model_versions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] - is None - ) diff --git a/tests/test_model_providers.py b/tests/test_model_providers.py deleted file mode 100644 index 353ad2339..000000000 --- a/tests/test_model_providers.py +++ /dev/null @@ -1,194 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - "budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - }, - ) - response = client.model_providers.list() - 
validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - "budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - expected_types: typing.Any = { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - response = client.model_providers.create(provider="OpenAI") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.create(provider="OpenAI") - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - 
"budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - expected_types: typing.Any = { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - response = client.model_providers.get(pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.get(pk=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.model_providers.delete(pk=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.model_providers.delete(pk=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - "budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - expected_types: typing.Any = { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": 
"datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - response = client.model_providers.update(pk=1, provider="OpenAI") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.update(pk=1, provider="OpenAI") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_predictions.py b/tests/test_predictions.py deleted file mode 100644 index f8559d6b3..000000000 --- a/tests/test_predictions.py +++ /dev/null @@ -1,404 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": 
None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": "integer", - "task": "integer", - "project": "integer", - } - }, - ) - response = client.predictions.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": "integer", - "task": "integer", - "project": "integer", - } - response = client.predictions.create( - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": 
"rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.create( - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": 
"integer", - "task": "integer", - "project": "integer", - } - response = client.predictions.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.predictions.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.predictions.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": "integer", - "task": "integer", - "project": "integer", - } - 
response = client.predictions.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_projects.py b/tests/test_projects.py deleted file mode 100644 index 2a985c9e2..000000000 --- a/tests/test_projects.py +++ /dev/null @@ -1,282 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "My project", - "description": "My first project", - "label_config": "[...]", - "expert_instruction": "Label all cats", - "show_instruction": True, - "show_skip_button": True, - "enable_empty_annotation": True, - "show_annotation_history": True, - "reveal_preannotations_interactively": True, - "show_collab_predictions": True, - "maximum_annotations": 1, - "color": "color", - "control_weights": { - "my_bbox": {"type": "RectangleLabels", "labels": {"Car": 1, "Airplaine": 0.5}, "overall": 0.33} - }, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "label_config": None, - "expert_instruction": None, - "show_instruction": None, - "show_skip_button": None, - "enable_empty_annotation": None, - "show_annotation_history": None, - "reveal_preannotations_interactively": None, - "show_collab_predictions": None, - "maximum_annotations": "integer", - "color": None, - "control_weights": ("dict", {0: (None, None)}), - } - response = client.projects.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "My project", - "description": "My first project", - "label_config": "[...]", - "expert_instruction": "Label all cats", - "show_instruction": True, - "show_skip_button": True, - "enable_empty_annotation": True, - "show_annotation_history": True, - "organization": 1, - "prompts": [ - { - "title": "title", - "description": "description", - "created_by": 1, - 
"created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - ], - "color": "#FF0000", - "maximum_annotations": 1, - "annotation_limit_count": 10, - "annotation_limit_percent": 50, - "is_published": True, - "model_version": "1.0.0", - "is_draft": False, - "created_by": { - "id": 1, - "first_name": "Jo", - "last_name": "Doe", - "email": "manager@humansignal.com", - "avatar": "avatar", - }, - "created_at": "2023-08-24T14:15:22Z", - "min_annotations_to_start_training": 0, - "start_training_on_annotation_update": True, - "show_collab_predictions": True, - "num_tasks_with_annotations": 10, - "task_number": 100, - "useful_annotation_number": 10, - "ground_truth_number": 5, - "skipped_annotations_number": 0, - "total_annotations_number": 10, - "total_predictions_number": 0, - "sampling": "Sequential sampling", - "show_ground_truth_first": True, - "show_overlap_first": True, - "overlap_cohort_percentage": 100, - "task_data_login": "user", - "task_data_password": "secret", - "control_weights": {"key": "value"}, - "parsed_label_config": {"key": "value"}, - "evaluate_predictions_automatically": False, - "config_has_control_tags": True, - "skip_queue": "REQUEUE_FOR_ME", - "reveal_preannotations_interactively": True, - "pinned_at": "2023-08-24T14:15:22Z", - "finished_task_number": 10, - "queue_total": 10, - "queue_done": 100, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "label_config": None, - "expert_instruction": None, - "show_instruction": None, - "show_skip_button": None, - "enable_empty_annotation": None, - "show_annotation_history": None, - "organization": "integer", - "prompts": ( - "list", - { - 0: { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": 
"integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - }, - ), - "color": None, - "maximum_annotations": "integer", - "annotation_limit_count": "integer", - "annotation_limit_percent": None, - "is_published": None, - "model_version": None, - "is_draft": None, - "created_by": {"id": "integer", "first_name": None, "last_name": None, "email": None, "avatar": None}, - "created_at": "datetime", - "min_annotations_to_start_training": "integer", - "start_training_on_annotation_update": None, - "show_collab_predictions": None, - "num_tasks_with_annotations": "integer", - "task_number": "integer", - "useful_annotation_number": "integer", - "ground_truth_number": "integer", - "skipped_annotations_number": "integer", - "total_annotations_number": "integer", - "total_predictions_number": "integer", - "sampling": None, - "show_ground_truth_first": None, - "show_overlap_first": None, - "overlap_cohort_percentage": "integer", - "task_data_login": None, - "task_data_password": None, - "control_weights": ("dict", {0: (None, None)}), - "parsed_label_config": ("dict", {0: (None, None)}), - "evaluate_predictions_automatically": None, - "config_has_control_tags": None, - "skip_queue": None, - "reveal_preannotations_interactively": None, - "pinned_at": "datetime", - "finished_task_number": "integer", - "queue_total": "integer", - "queue_done": "integer", - } - response = client.projects.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.projects.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - 
await async_client.projects.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "My project", - "description": "My first project", - "label_config": "[...]", - "expert_instruction": "Label all cats", - "show_instruction": True, - "show_skip_button": True, - "enable_empty_annotation": True, - "show_annotation_history": True, - "reveal_preannotations_interactively": True, - "show_collab_predictions": True, - "maximum_annotations": 1, - "annotation_limit_count": 1, - "annotation_limit_percent": 1.1, - "color": "color", - "control_weights": { - "my_bbox": {"type": "RectangleLabels", "labels": {"Car": 1, "Airplaine": 0.5}, "overall": 0.33} - }, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "label_config": None, - "expert_instruction": None, - "show_instruction": None, - "show_skip_button": None, - "enable_empty_annotation": None, - "show_annotation_history": None, - "reveal_preannotations_interactively": None, - "show_collab_predictions": None, - "maximum_annotations": "integer", - "annotation_limit_count": "integer", - "annotation_limit_percent": None, - "color": None, - "control_weights": ("dict", {0: (None, None)}), - } - response = client.projects.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_import_tasks(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "task_count": 1, - "annotation_count": 1, - "predictions_count": 1, - "duration": 1.1, - "file_upload_ids": [1], - "could_be_tasks_list": True, - "found_formats": ["found_formats"], - "data_columns": ["data_columns"], - } - expected_types: typing.Any = { - "task_count": "integer", - "annotation_count": 
"integer", - "predictions_count": "integer", - "duration": None, - "file_upload_ids": ("list", {0: "integer"}), - "could_be_tasks_list": None, - "found_formats": ("list", {0: None}), - "data_columns": ("list", {0: None}), - } - response = client.projects.import_tasks(id=1, request=[{"key": "value"}]) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.import_tasks(id=1, request=[{"key": "value"}]) - validate_response(async_response, expected_response, expected_types) - - -async def test_validate_config(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"label_config": "label_config"} - expected_types: typing.Any = {"label_config": None} - response = client.projects.validate_config(id=1, label_config="label_config") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.validate_config(id=1, label_config="label_config") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_prompts.py b/tests/test_prompts.py deleted file mode 100644 index d0e1f0ca1..000000000 --- a/tests/test_prompts.py +++ /dev/null @@ -1,180 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - }, - ) - response = client.prompts.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - expected_types: typing.Any = { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - 
response = client.prompts.create(title="title", input_fields=["input_fields"], output_classes=["output_classes"]) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.create( - title="title", input_fields=["input_fields"], output_classes=["output_classes"] - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - expected_types: typing.Any = { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - response = client.prompts.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.prompts.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.prompts.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": 
"2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - expected_types: typing.Any = { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - response = client.prompts.update( - id=1, title="title", input_fields=["input_fields"], output_classes=["output_classes"] - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.update( - id=1, title="title", input_fields=["input_fields"], output_classes=["output_classes"] - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_batch_predictions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"detail": "detail"} - expected_types: typing.Any = {"detail": None} - response = client.prompts.batch_predictions() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.batch_predictions() - validate_response(async_response, expected_response, expected_types) - - -async def test_batch_failed_predictions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"detail": "detail"} - expected_types: typing.Any = {"detail": None} - response = client.prompts.batch_failed_predictions() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.batch_failed_predictions() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_tasks.py b/tests/test_tasks.py deleted file mode 100644 index 
dea5844c0..000000000 --- a/tests/test_tasks.py +++ /dev/null @@ -1,351 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_create_many_status(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "preannotated_from_fields": {"key": "value"}, - "commit_to_project": True, - "return_task_ids": True, - "status": "created", - "url": "url", - "traceback": "traceback", - "error": "error", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "task_count": 1, - "annotation_count": 1, - "prediction_count": 1, - "duration": 1, - "file_upload_ids": {"key": "value"}, - "could_be_tasks_list": True, - "found_formats": {"key": "value"}, - "data_columns": {"key": "value"}, - "tasks": {"key": "value"}, - "task_ids": {"key": "value"}, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "preannotated_from_fields": ("dict", {0: (None, None)}), - "commit_to_project": None, - "return_task_ids": None, - "status": None, - "url": None, - "traceback": None, - "error": None, - "created_at": "datetime", - "updated_at": "datetime", - "finished_at": "datetime", - "task_count": "integer", - "annotation_count": "integer", - "prediction_count": "integer", - "duration": "integer", - "file_upload_ids": ("dict", {0: (None, None)}), - "could_be_tasks_list": None, - "found_formats": ("dict", {0: (None, None)}), - "data_columns": ("dict", {0: (None, None)}), - "tasks": ("dict", {0: (None, None)}), - "task_ids": ("dict", {0: (None, None)}), - "project": "integer", - } - response = client.tasks.create_many_status(id=1, import_pk="import_pk") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tasks.create_many_status(id=1, 
import_pk="import_pk") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete_all_tasks(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "data": {"image": "https://example.com/image.jpg", "text": "Hello, AI!"}, - "meta": {"key": "value"}, - "created_at": "2024-06-18T23:45:46Z", - "updated_at": "2024-06-18T23:45:46Z", - "is_labeled": False, - "overlap": 1, - "inner_id": 1, - "total_annotations": 0, - "cancelled_annotations": 0, - "total_predictions": 0, - "comment_count": 0, - "unresolved_comment_count": 0, - "last_comment_updated_at": "2024-01-15T09:30:00Z", - "project": 1, - "updated_by": [{"user_id": 1}], - "file_upload": "42d46c4c-my-pic.jpeg", - "comment_authors": [1], - } - expected_types: typing.Any = { - "id": "integer", - "data": ("dict", {0: (None, None), 1: (None, None)}), - "meta": ("dict", {0: (None, None)}), - "created_at": "datetime", - "updated_at": "datetime", - "is_labeled": None, - "overlap": "integer", - "inner_id": "integer", - "total_annotations": "integer", - "cancelled_annotations": "integer", - "total_predictions": "integer", - "comment_count": "integer", - "unresolved_comment_count": "integer", - "last_comment_updated_at": "datetime", - "project": "integer", - "updated_by": ("list", {0: ("dict", {0: (None, None)})}), - "file_upload": None, - "comment_authors": ("list", {0: "integer"}), - } - response = client.tasks.create(data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, project=1) - validate_response(response, 
expected_response, expected_types) - - async_response = await async_client.tasks.create( - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, project=1 - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 13, - "predictions": [ - { - "result": [{"key": "value"}], - "score": 1.1, - "model_version": "model_version", - "model": {"key": "value"}, - "model_run": {"key": "value"}, - "task": 1, - "project": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - ], - "annotations": [ - { - "id": 1, - "result": [{"key": "value"}], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": {"key": "value"}, - "unique_id": "unique_id", - "was_cancelled": True, - "ground_truth": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 1.1, - "import_id": 1, - "last_action": "prediction", - "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - ], - "drafts": [ - {"result": [{"key": "value"}], "created_at": "2024-01-15T09:30:00Z", "updated_at": "2024-01-15T09:30:00Z"} - ], - "annotators": [1], - "inner_id": 2, - "cancelled_annotations": 0, - "total_annotations": 0, - "total_predictions": 0, - "completed_at": "2024-01-15T09:30:00Z", - "annotations_results": "", - "predictions_results": "", - "predictions_score": 1.1, - "file_upload": "6b25fc23-some_3.mp4", - "storage_filename": "storage_filename", - "annotations_ids": "", - "predictions_model_versions": "", - "avg_lead_time": 1.1, - "draft_exists": False, - "updated_by": [{"key": "value"}], - "data": {"image": "/data/upload/1/6b25fc23-some_3.mp4"}, - "meta": {"key": "value"}, - "created_at": "2024-06-18T23:45:46Z", - 
"updated_at": "2024-06-18T23:45:46Z", - "is_labeled": False, - "overlap": 1, - "comment_count": 0, - "unresolved_comment_count": 0, - "last_comment_updated_at": "2024-01-15T09:30:00Z", - "project": 1, - "comment_authors": [1], - } - expected_types: typing.Any = { - "id": "integer", - "predictions": ( - "list", - { - 0: { - "result": ("list", {0: ("dict", {0: (None, None)})}), - "score": None, - "model_version": None, - "model": ("dict", {0: (None, None)}), - "model_run": ("dict", {0: (None, None)}), - "task": "integer", - "project": None, - "created_at": "datetime", - "updated_at": "datetime", - } - }, - ), - "annotations": ( - "list", - { - 0: { - "id": "integer", - "result": ("list", {0: ("dict", {0: (None, None)})}), - "created_username": None, - "created_ago": None, - "completed_by": ("dict", {0: (None, None)}), - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - }, - ), - "drafts": ( - "list", - { - 0: { - "result": ("list", {0: ("dict", {0: (None, None)})}), - "created_at": "datetime", - "updated_at": "datetime", - } - }, - ), - "annotators": ("list", {0: "integer"}), - "inner_id": "integer", - "cancelled_annotations": "integer", - "total_annotations": "integer", - "total_predictions": "integer", - "completed_at": "datetime", - "annotations_results": None, - "predictions_results": None, - "predictions_score": None, - "file_upload": None, - "storage_filename": None, - "annotations_ids": None, - "predictions_model_versions": None, - "avg_lead_time": None, - "draft_exists": None, - "updated_by": ("list", {0: ("dict", {0: (None, None)})}), - "data": ("dict", {0: (None, None)}), - "meta": ("dict", {0: (None, 
None)}), - "created_at": "datetime", - "updated_at": "datetime", - "is_labeled": None, - "overlap": "integer", - "comment_count": "integer", - "unresolved_comment_count": "integer", - "last_comment_updated_at": "datetime", - "project": "integer", - "comment_authors": ("list", {0: "integer"}), - } - response = client.tasks.get(id="id") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tasks.get(id="id") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.tasks.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.tasks.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "data": {"image": "https://example.com/image.jpg", "text": "Hello, AI!"}, - "meta": {"key": "value"}, - "created_at": "2024-06-18T23:45:46Z", - "updated_at": "2024-06-18T23:45:46Z", - "is_labeled": False, - "overlap": 1, - "inner_id": 1, - "total_annotations": 0, - "cancelled_annotations": 0, - "total_predictions": 0, - "comment_count": 0, - "unresolved_comment_count": 0, - "last_comment_updated_at": "2024-01-15T09:30:00Z", - "project": 1, - "updated_by": [{"user_id": 1}], - "file_upload": "42d46c4c-my-pic.jpeg", - "comment_authors": [1], - } - expected_types: typing.Any = { - "id": "integer", - "data": ("dict", {0: (None, None), 1: (None, None)}), - "meta": ("dict", {0: (None, None)}), - "created_at": "datetime", - "updated_at": "datetime", - "is_labeled": None, - "overlap": "integer", - "inner_id": "integer", - "total_annotations": "integer", - "cancelled_annotations": "integer", - "total_predictions": "integer", - "comment_count": 
"integer", - "unresolved_comment_count": "integer", - "last_comment_updated_at": "datetime", - "project": "integer", - "updated_by": ("list", {0: ("dict", {0: (None, None)})}), - "file_upload": None, - "comment_authors": ("list", {0: "integer"}), - } - response = client.tasks.update( - id="id", data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, project=1 - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tasks.update( - id="id", data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, project=1 - ) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_tokens.py b/tests/test_tokens.py deleted file mode 100644 index 098128df9..000000000 --- a/tests/test_tokens.py +++ /dev/null @@ -1,62 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_blacklist(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.tokens.blacklist(refresh="refresh") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.tokens.blacklist(refresh="refresh") # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"token": "token", "created_at": "created_at", "expires_at": "expires_at"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - {0: {"token": None, "created_at": None, "expires_at": None}}, - ) - response = client.tokens.get() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.get() - validate_response(async_response, 
expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"token": "token", "created_at": "created_at", "expires_at": "expires_at"} - expected_types: typing.Any = {"token": None, "created_at": None, "expires_at": None} - response = client.tokens.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_refresh(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"access": "access"} - expected_types: typing.Any = {"access": None} - response = client.tokens.refresh(refresh="refresh") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.refresh(refresh="refresh") - validate_response(async_response, expected_response, expected_types) - - -async def test_rotate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"refresh": "refresh"} - expected_types: typing.Any = {"refresh": None} - response = client.tokens.rotate(refresh="refresh") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.rotate(refresh="refresh") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_users.py b/tests/test_users.py deleted file mode 100644 index e6e5582b0..000000000 --- a/tests/test_users.py +++ /dev/null @@ -1,226 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_reset_token(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"token": "token"} - expected_types: typing.Any = {"token": None} - response = client.users.reset_token() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.reset_token() - validate_response(async_response, expected_response, expected_types) - - -async def test_get_token(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"detail": "detail"} - expected_types: typing.Any = {"detail": None} - response = client.users.get_token() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.get_token() - validate_response(async_response, expected_response, expected_types) - - -async def test_whoami(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.whoami() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.whoami() - validate_response(async_response, expected_response, expected_types) 
- - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - }, - ) - response = client.users.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.create() - 
validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.users.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.users.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": 
None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.update(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_versions.py b/tests/test_versions.py deleted file mode 100644 index 16e3f4d37..000000000 --- a/tests/test_versions.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "release": "release", - "label-studio-os-package": {"key": "value"}, - "label-studio-os-backend": {"key": "value"}, - "label-studio-frontend": {"key": "value"}, - "dm2": {"key": "value"}, - "label-studio-converter": {"key": "value"}, - "edition": "Community", - "lsf": {"key": "value"}, - "backend": {"key": "value"}, - } - expected_types: typing.Any = { - "release": None, - "label-studio-os-package": ("dict", {0: (None, None)}), - "label-studio-os-backend": ("dict", {0: (None, None)}), - "label-studio-frontend": ("dict", {0: (None, None)}), - "dm2": ("dict", {0: (None, None)}), - "label-studio-converter": ("dict", {0: (None, None)}), - "edition": None, - "lsf": ("dict", {0: (None, None)}), - "backend": ("dict", {0: (None, None)}), - } - response = client.versions.get() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.versions.get() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_views.py b/tests/test_views.py deleted file mode 
100644 index f9943d8a6..000000000 --- a/tests/test_views.py +++ /dev/null @@ -1,175 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), - "conjunction": None, - }, - "data": ("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - }, - ) - response = client.views.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), - "conjunction": None, - }, - "data": 
("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - response = client.views.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_delete_all(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.views.delete_all(project=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.views.delete_all(project=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), - "conjunction": None, - }, - "data": ("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - response = client.views.get(id="id") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.get(id="id") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - 
assert ( - client.views.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.views.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), - "conjunction": None, - }, - "data": ("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - response = client.views.update(id="id") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.update(id="id") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_workspaces.py b/tests/test_workspaces.py deleted file mode 100644 index dc43be593..000000000 --- a/tests/test_workspaces.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - }, - ) - response = client.workspaces.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - response = client.workspaces.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - response = client.workspaces.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.workspaces.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.workspaces.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - response = client.workspaces.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = 
await async_client.workspaces.update(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/utilities.py b/tests/utilities.py deleted file mode 100644 index 3d228806a..000000000 --- a/tests/utilities.py +++ /dev/null @@ -1,162 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -import uuid - -from dateutil import parser - -import pydantic - - -def cast_field(json_expectation: typing.Any, type_expectation: typing.Any) -> typing.Any: - # Cast these specific types which come through as string and expect our - # models to cast to the correct type. - if type_expectation == "uuid": - return uuid.UUID(json_expectation) - elif type_expectation == "date": - return parser.parse(json_expectation).date() - elif type_expectation == "datetime": - return parser.parse(json_expectation) - elif type_expectation == "set": - return set(json_expectation) - elif type_expectation == "integer": - # Necessary as we allow numeric keys, but JSON makes them strings - return int(json_expectation) - - return json_expectation - - -def validate_field(response: typing.Any, json_expectation: typing.Any, type_expectation: typing.Any) -> None: - # Allow for an escape hatch if the object cannot be validated - if type_expectation == "no_validate": - return - - is_container_of_complex_type = False - # Parse types in containers, note that dicts are handled within `validate_response` - if isinstance(json_expectation, list): - if isinstance(type_expectation, tuple): - container_expectation = type_expectation[0] - contents_expectation = type_expectation[1] - - cast_json_expectation = [] - for idx, ex in enumerate(json_expectation): - if isinstance(contents_expectation, dict): - entry_expectation = contents_expectation.get(idx) - if isinstance(entry_expectation, dict): - is_container_of_complex_type = True - validate_response( - response=response[idx], - json_expectation=ex, - type_expectations=entry_expectation, - ) - else: - 
cast_json_expectation.append(cast_field(ex, entry_expectation)) - else: - cast_json_expectation.append(ex) - json_expectation = cast_json_expectation - - # Note that we explicitly do not allow for sets of pydantic models as they are not hashable, so - # if any of the values of the set have a type_expectation of a dict, we're assuming it's a pydantic - # model and keeping it a list. - if container_expectation != "set" or not any( - map( - lambda value: isinstance(value, dict), - list(contents_expectation.values()), - ) - ): - json_expectation = cast_field(json_expectation, container_expectation) - elif isinstance(type_expectation, tuple): - container_expectation = type_expectation[0] - contents_expectation = type_expectation[1] - if isinstance(contents_expectation, dict): - json_expectation = { - cast_field( - key, - contents_expectation.get(idx)[0] # type: ignore - if contents_expectation.get(idx) is not None - else None, - ): cast_field( - value, - contents_expectation.get(idx)[1] # type: ignore - if contents_expectation.get(idx) is not None - else None, - ) - for idx, (key, value) in enumerate(json_expectation.items()) - } - else: - json_expectation = cast_field(json_expectation, container_expectation) - elif type_expectation is not None: - json_expectation = cast_field(json_expectation, type_expectation) - - # When dealing with containers of models, etc. 
we're validating them implicitly, so no need to check the resultant list - if not is_container_of_complex_type: - assert ( - json_expectation == response - ), "Primitives found, expected: {0} (type: {1}), Actual: {2} (type: {3})".format( - json_expectation, type(json_expectation), response, type(response) - ) - - -# Arg type_expectations is a deeply nested structure that matches the response, but with the values replaced with the expected types -def validate_response(response: typing.Any, json_expectation: typing.Any, type_expectations: typing.Any) -> None: - # Allow for an escape hatch if the object cannot be validated - if type_expectations == "no_validate": - return - - if ( - not isinstance(response, list) - and not isinstance(response, dict) - and not issubclass(type(response), pydantic.BaseModel) - ): - validate_field( - response=response, - json_expectation=json_expectation, - type_expectation=type_expectations, - ) - return - - if isinstance(response, list): - assert len(response) == len(json_expectation), "Length mismatch, expected: {0}, Actual: {1}".format( - len(response), len(json_expectation) - ) - content_expectation = type_expectations - if isinstance(type_expectations, tuple): - content_expectation = type_expectations[1] - for idx, item in enumerate(response): - validate_response( - response=item, - json_expectation=json_expectation[idx], - type_expectations=content_expectation[idx], - ) - else: - response_json = response - if issubclass(type(response), pydantic.BaseModel): - response_json = response.dict(by_alias=True) - - for key, value in json_expectation.items(): - assert key in response_json, "Field {0} not found within the response object: {1}".format( - key, response_json - ) - - type_expectation = None - if type_expectations is not None and isinstance(type_expectations, dict): - type_expectation = type_expectations.get(key) - - # If your type_expectation is a tuple then you have a container field, process it as such - # Otherwise, we're just 
validating a single field that's a pydantic model. - if isinstance(value, dict) and not isinstance(type_expectation, tuple): - validate_response( - response=response_json[key], - json_expectation=value, - type_expectations=type_expectation, - ) - else: - validate_field( - response=response_json[key], - json_expectation=value, - type_expectation=type_expectation, - ) - - # Ensure there are no additional fields here either - del response_json[key] - assert len(response_json) == 0, "Additional fields found, expected None: {0}".format(response_json) diff --git a/tests/utils/assets/models/__init__.py b/tests/utils/assets/models/__init__.py index 3a1c852e7..2cf012635 100644 --- a/tests/utils/assets/models/__init__.py +++ b/tests/utils/assets/models/__init__.py @@ -5,7 +5,7 @@ from .circle import CircleParams from .object_with_defaults import ObjectWithDefaultsParams from .object_with_optional_field import ObjectWithOptionalFieldParams -from .shape import ShapeParams, Shape_CircleParams, Shape_SquareParams +from .shape import Shape_CircleParams, Shape_SquareParams, ShapeParams from .square import SquareParams from .undiscriminated_shape import UndiscriminatedShapeParams diff --git a/tests/utils/assets/models/circle.py b/tests/utils/assets/models/circle.py index 09b8e0647..c54c57b40 100644 --- a/tests/utils/assets/models/circle.py +++ b/tests/utils/assets/models/circle.py @@ -3,7 +3,7 @@ # This file was auto-generated by Fern from our API Definition. import typing_extensions -import typing_extensions + from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/object_with_defaults.py b/tests/utils/assets/models/object_with_defaults.py index ef14f7b2c..a977b1d2a 100644 --- a/tests/utils/assets/models/object_with_defaults.py +++ b/tests/utils/assets/models/object_with_defaults.py @@ -3,7 +3,6 @@ # This file was auto-generated by Fern from our API Definition. 
import typing_extensions -import typing_extensions class ObjectWithDefaultsParams(typing_extensions.TypedDict): diff --git a/tests/utils/assets/models/object_with_optional_field.py b/tests/utils/assets/models/object_with_optional_field.py index ee4f9dbfe..802e01ff2 100644 --- a/tests/utils/assets/models/object_with_optional_field.py +++ b/tests/utils/assets/models/object_with_optional_field.py @@ -2,16 +2,17 @@ # This file was auto-generated by Fern from our API Definition. -import typing_extensions -import typing -import typing_extensions -from label_studio_sdk.core.serialization import FieldMetadata import datetime as dt +import typing import uuid + +import typing_extensions from .color import Color from .shape import ShapeParams from .undiscriminated_shape import UndiscriminatedShapeParams +from label_studio_sdk.core.serialization import FieldMetadata + class ObjectWithOptionalFieldParams(typing_extensions.TypedDict): literal: typing.Literal["lit_one"] diff --git a/tests/utils/assets/models/shape.py b/tests/utils/assets/models/shape.py index 820dec7a6..e9d51e32c 100644 --- a/tests/utils/assets/models/shape.py +++ b/tests/utils/assets/models/shape.py @@ -3,9 +3,11 @@ # This file was auto-generated by Fern from our API Definition. from __future__ import annotations -import typing_extensions -import typing_extensions + import typing + +import typing_extensions + from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/square.py b/tests/utils/assets/models/square.py index b70897be3..d9d65afca 100644 --- a/tests/utils/assets/models/square.py +++ b/tests/utils/assets/models/square.py @@ -3,7 +3,7 @@ # This file was auto-generated by Fern from our API Definition. 
import typing_extensions -import typing_extensions + from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/undiscriminated_shape.py b/tests/utils/assets/models/undiscriminated_shape.py index 68876a23c..99f12b300 100644 --- a/tests/utils/assets/models/undiscriminated_shape.py +++ b/tests/utils/assets/models/undiscriminated_shape.py @@ -3,6 +3,7 @@ # This file was auto-generated by Fern from our API Definition. import typing + from .circle import CircleParams from .square import SquareParams diff --git a/tests/utils/test_query_encoding.py b/tests/utils/test_query_encoding.py index ff439eb34..6276c4f4f 100644 --- a/tests/utils/test_query_encoding.py +++ b/tests/utils/test_query_encoding.py @@ -34,4 +34,4 @@ def test_query_encoding_deep_object_arrays() -> None: def test_encode_query_with_none() -> None: encoded = encode_query(None) - assert encoded == None + assert encoded is None diff --git a/tests/utils/test_serialization.py b/tests/utils/test_serialization.py index d085f645f..2654074e2 100644 --- a/tests/utils/test_serialization.py +++ b/tests/utils/test_serialization.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from typing import List, Any +from typing import Any, List -from label_studio_sdk.core.serialization import convert_and_respect_annotation_metadata -from .assets.models import ShapeParams, ObjectWithOptionalFieldParams +from .assets.models import ObjectWithOptionalFieldParams, ShapeParams +from label_studio_sdk.core.serialization import convert_and_respect_annotation_metadata UNION_TEST: ShapeParams = {"radius_measurement": 1.0, "shape_type": "circle", "id": "1"} UNION_TEST_CONVERTED = {"shapeType": "circle", "radiusMeasurement": 1.0, "id": "1"} diff --git a/tests/workspaces/__init__.py b/tests/workspaces/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/workspaces/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/workspaces/test_members.py b/tests/workspaces/test_members.py deleted file mode 100644 index 3724adc0d..000000000 --- a/tests/workspaces/test_members.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"user": {"key": "value"}}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ("list", {0: {"user": ("dict", {0: (None, None)})}}) - response = client.workspaces.members.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.members.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"user": 1} - expected_types: typing.Any = {"user": "integer"} - response = client.workspaces.members.create(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.members.create(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] - is None - ) From 719507e84d8c31744d1943ad9a80773c5b0370c0 Mon Sep 17 00:00:00 2001 From: fern-api <115122769+fern-api[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 20:00:09 +0000 Subject: [PATCH 2/4] SDK regeneration From 4a0bf25560aed32b6ecb3f70ee0f1b39589613b8 Mon Sep 17 00:00:00 2001 From: fern-api <115122769+fern-api[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 20:03:48 +0000 Subject: [PATCH 3/4] SDK regeneration --- .gitignore | 4 +- poetry.lock | 109 +- pyproject.toml | 
26 +- requirements.txt | 10 +- src/label_studio_sdk/__init__.py | 2 - src/label_studio_sdk/actions/__init__.py | 2 - src/label_studio_sdk/actions/client.py | 148 +- src/label_studio_sdk/actions/raw_client.py | 223 --- .../actions/types/__init__.py | 2 - .../types/actions_create_request_filters.py | 14 +- ...tions_create_request_filters_items_item.py | 18 +- .../actions_create_request_selected_items.py | 9 +- ..._create_request_selected_items_excluded.py | 8 +- ..._create_request_selected_items_included.py | 8 +- src/label_studio_sdk/annotations/__init__.py | 2 - src/label_studio_sdk/annotations/client.py | 392 ++++-- .../annotations/raw_client.py | 794 ----------- .../annotations/types/__init__.py | 2 - ...ions_create_bulk_request_selected_items.py | 8 +- .../annotations_create_bulk_response_item.py | 4 +- src/label_studio_sdk/base_client.py | 65 +- src/label_studio_sdk/comments/__init__.py | 2 - src/label_studio_sdk/comments/client.py | 279 +++- src/label_studio_sdk/comments/raw_client.py | 529 ------- src/label_studio_sdk/core/__init__.py | 5 - src/label_studio_sdk/core/api_error.py | 18 +- src/label_studio_sdk/core/file.py | 3 +- src/label_studio_sdk/core/force_multipart.py | 16 - src/label_studio_sdk/core/http_client.py | 96 +- src/label_studio_sdk/core/http_response.py | 55 - src/label_studio_sdk/core/jsonable_encoder.py | 1 + src/label_studio_sdk/core/pagination.py | 94 +- .../core/pydantic_utilities.py | 183 ++- src/label_studio_sdk/core/serialization.py | 10 +- src/label_studio_sdk/errors/__init__.py | 2 - .../errors/bad_request_error.py | 7 +- .../errors/internal_server_error.py | 6 +- .../errors/not_found_error.py | 7 +- .../errors/unauthorized_error.py | 7 +- .../export_storage/__init__.py | 2 - .../export_storage/azure/__init__.py | 2 - .../export_storage/azure/client.py | 433 ++++-- .../export_storage/azure/raw_client.py | 881 ------------ .../export_storage/azure/types/__init__.py | 2 - .../azure/types/azure_create_response.py | 4 +- 
.../azure/types/azure_update_response.py | 4 +- src/label_studio_sdk/export_storage/client.py | 130 +- .../export_storage/gcs/__init__.py | 2 - .../export_storage/gcs/client.py | 433 ++++-- .../export_storage/gcs/raw_client.py | 881 ------------ .../export_storage/gcs/types/__init__.py | 2 - .../gcs/types/gcs_create_response.py | 4 +- .../gcs/types/gcs_update_response.py | 4 +- .../export_storage/local/__init__.py | 2 - .../export_storage/local/client.py | 409 ++++-- .../export_storage/local/raw_client.py | 821 ----------- .../export_storage/local/types/__init__.py | 2 - .../local/types/local_create_response.py | 4 +- .../local/types/local_update_response.py | 4 +- .../export_storage/raw_client.py | 93 -- .../export_storage/redis/__init__.py | 2 - .../export_storage/redis/client.py | 445 ++++-- .../export_storage/redis/raw_client.py | 911 ------------ .../export_storage/redis/types/__init__.py | 2 - .../redis/types/redis_create_response.py | 4 +- .../redis/types/redis_update_response.py | 4 +- .../export_storage/s3/__init__.py | 2 - .../export_storage/s3/client.py | 481 +++++-- .../export_storage/s3/raw_client.py | 999 ------------- .../export_storage/s3/types/__init__.py | 2 - .../s3/types/s3create_response.py | 4 +- .../s3/types/s3update_response.py | 4 +- .../export_storage/s3s/__init__.py | 2 - .../export_storage/s3s/client.py | 413 ++++-- .../export_storage/s3s/raw_client.py | 827 ----------- .../export_storage/types/__init__.py | 2 - ...export_storage_list_types_response_item.py | 4 +- src/label_studio_sdk/files/__init__.py | 2 - src/label_studio_sdk/files/client.py | 257 +++- src/label_studio_sdk/files/raw_client.py | 523 ------- .../import_storage/__init__.py | 2 - .../import_storage/azure/__init__.py | 2 - .../import_storage/azure/client.py | 469 +++++-- .../import_storage/azure/raw_client.py | 981 ------------- .../import_storage/azure/types/__init__.py | 2 - .../azure/types/azure_create_response.py | 4 +- .../azure/types/azure_update_response.py | 4 +- 
src/label_studio_sdk/import_storage/client.py | 130 +- .../import_storage/gcs/__init__.py | 2 - .../import_storage/gcs/client.py | 469 +++++-- .../import_storage/gcs/raw_client.py | 981 ------------- .../import_storage/gcs/types/__init__.py | 2 - .../gcs/types/gcs_create_response.py | 4 +- .../gcs/types/gcs_update_response.py | 4 +- .../import_storage/local/__init__.py | 2 - .../import_storage/local/client.py | 409 ++++-- .../import_storage/local/raw_client.py | 827 ----------- .../import_storage/local/types/__init__.py | 2 - .../local/types/local_create_response.py | 4 +- .../local/types/local_update_response.py | 4 +- .../import_storage/raw_client.py | 93 -- .../import_storage/redis/__init__.py | 2 - .../import_storage/redis/client.py | 445 ++++-- .../import_storage/redis/raw_client.py | 917 ------------ .../import_storage/redis/types/__init__.py | 2 - .../redis/types/redis_create_response.py | 4 +- .../redis/types/redis_update_response.py | 4 +- .../import_storage/s3/__init__.py | 2 - .../import_storage/s3/client.py | 529 ++++--- .../import_storage/s3/raw_client.py | 1129 --------------- .../import_storage/s3/types/__init__.py | 2 - .../s3/types/s3create_response.py | 4 +- .../s3/types/s3update_response.py | 4 +- .../import_storage/s3s/__init__.py | 2 - .../import_storage/s3s/client.py | 501 +++++-- .../import_storage/s3s/raw_client.py | 1047 -------------- .../import_storage/types/__init__.py | 2 - ...import_storage_list_types_response_item.py | 4 +- src/label_studio_sdk/jwt_settings/__init__.py | 2 - src/label_studio_sdk/jwt_settings/client.py | 128 +- .../jwt_settings/raw_client.py | 212 --- src/label_studio_sdk/ml/__init__.py | 2 - src/label_studio_sdk/ml/client.py | 472 +++++-- src/label_studio_sdk/ml/raw_client.py | 968 ------------- src/label_studio_sdk/ml/types/__init__.py | 2 - .../ml/types/ml_create_response.py | 4 +- .../ml/types/ml_update_response.py | 4 +- .../model_providers/__init__.py | 2 - .../model_providers/client.py | 396 ++++-- 
.../model_providers/raw_client.py | 706 ---------- src/label_studio_sdk/predictions/__init__.py | 2 - src/label_studio_sdk/predictions/client.py | 271 +++- .../predictions/raw_client.py | 573 -------- src/label_studio_sdk/projects/__init__.py | 2 - src/label_studio_sdk/projects/client.py | 575 ++++++-- .../projects/exports/__init__.py | 2 - .../projects/exports/client.py | 548 ++++++-- .../projects/exports/raw_client.py | 1038 -------------- .../projects/exports/types/__init__.py | 2 - .../exports/types/exports_convert_response.py | 6 +- .../exports_list_formats_response_item.py | 6 +- .../projects/pauses/__init__.py | 2 - .../projects/pauses/client.py | 289 +++- .../projects/pauses/raw_client.py | 543 ------- src/label_studio_sdk/projects/raw_client.py | 1245 ----------------- .../projects/types/__init__.py | 2 - .../types/projects_create_response.py | 4 +- .../types/projects_import_tasks_response.py | 4 +- .../projects/types/projects_list_response.py | 6 +- .../types/projects_update_response.py | 4 +- src/label_studio_sdk/prompts/__init__.py | 2 - src/label_studio_sdk/prompts/client.py | 517 +++++-- .../prompts/indicators/__init__.py | 2 - .../prompts/indicators/client.py | 129 +- .../prompts/indicators/raw_client.py | 183 --- src/label_studio_sdk/prompts/raw_client.py | 890 ------------ src/label_studio_sdk/prompts/runs/__init__.py | 2 - src/label_studio_sdk/prompts/runs/client.py | 198 ++- .../prompts/runs/raw_client.py | 348 ----- .../prompts/runs/types/__init__.py | 2 - .../prompts/types/__init__.py | 2 - ...ictions_request_failed_predictions_item.py | 4 +- ...ompts_batch_failed_predictions_response.py | 4 +- ..._batch_predictions_request_results_item.py | 4 +- .../prompts_batch_predictions_response.py | 4 +- .../prompts/versions/__init__.py | 2 - .../prompts/versions/client.py | 546 ++++++-- .../prompts/versions/raw_client.py | 1008 ------------- src/label_studio_sdk/tasks/__init__.py | 2 - src/label_studio_sdk/tasks/client.py | 412 ++++-- 
src/label_studio_sdk/tasks/raw_client.py | 816 ----------- src/label_studio_sdk/tasks/types/__init__.py | 2 - .../tasks/types/tasks_list_response.py | 6 +- src/label_studio_sdk/tokens/__init__.py | 2 - src/label_studio_sdk/tokens/client.py | 329 ++++- src/label_studio_sdk/tokens/raw_client.py | 495 ------- src/label_studio_sdk/types/__init__.py | 2 - .../types/access_token_response.py | 6 +- src/label_studio_sdk/types/annotation.py | 6 +- .../types/annotation_completed_by.py | 1 - .../types/annotation_filter_options.py | 4 +- .../types/annotations_dm_field.py | 6 +- .../types/api_token_response.py | 6 +- .../types/azure_blob_export_storage.py | 6 +- .../types/azure_blob_export_storage_status.py | 3 +- .../types/azure_blob_import_storage.py | 6 +- .../types/azure_blob_import_storage_status.py | 3 +- src/label_studio_sdk/types/base_task.py | 8 +- .../types/base_task_updated_by.py | 3 +- src/label_studio_sdk/types/base_user.py | 6 +- src/label_studio_sdk/types/comment.py | 6 +- .../types/converted_format.py | 6 +- .../types/data_manager_task_serializer.py | 18 +- ...ata_manager_task_serializer_drafts_item.py | 6 +- ...anager_task_serializer_predictions_item.py | 6 +- src/label_studio_sdk/types/export.py | 10 +- src/label_studio_sdk/types/export_snapshot.py | 14 +- src/label_studio_sdk/types/file_upload.py | 4 +- src/label_studio_sdk/types/filter.py | 4 +- src/label_studio_sdk/types/filter_group.py | 6 +- .../types/gcs_export_storage.py | 6 +- .../types/gcs_export_storage_status.py | 3 +- .../types/gcs_import_storage.py | 6 +- .../types/gcs_import_storage_status.py | 3 +- src/label_studio_sdk/types/inference_run.py | 10 +- .../types/inference_run_cost_estimate.py | 4 +- .../types/inference_run_status.py | 3 +- .../types/jwt_settings_response.py | 6 +- .../types/key_indicator_value.py | 4 +- src/label_studio_sdk/types/key_indicators.py | 1 - .../types/key_indicators_item.py | 10 +- ...ey_indicators_item_additional_kpis_item.py | 4 +- 
.../key_indicators_item_extra_kpis_item.py | 4 +- .../types/local_files_export_storage.py | 6 +- .../local_files_export_storage_status.py | 3 +- .../types/local_files_import_storage.py | 6 +- .../local_files_import_storage_status.py | 3 +- src/label_studio_sdk/types/ml_backend.py | 8 +- .../types/model_provider_connection.py | 18 +- .../model_provider_connection_provider.py | 11 +- src/label_studio_sdk/types/pause.py | 8 +- src/label_studio_sdk/types/prediction.py | 6 +- src/label_studio_sdk/types/project.py | 10 +- src/label_studio_sdk/types/project_import.py | 8 +- .../types/project_label_config.py | 6 +- .../types/project_sampling.py | 3 +- src/label_studio_sdk/types/prompt.py | 10 +- .../types/prompt_associated_projects_item.py | 1 - .../prompt_associated_projects_item_id.py | 4 +- src/label_studio_sdk/types/prompt_version.py | 10 +- .../types/prompt_version_provider.py | 11 +- .../types/redis_export_storage.py | 6 +- .../types/redis_export_storage_status.py | 3 +- .../types/redis_import_storage.py | 6 +- .../types/redis_import_storage_status.py | 3 +- .../types/refined_prompt_response.py | 8 +- .../types/rotate_token_response.py | 6 +- .../types/s3export_storage.py | 8 +- .../types/s3export_storage_status.py | 3 +- .../types/s3import_storage.py | 8 +- .../types/s3import_storage_status.py | 3 +- .../types/s3s_export_storage.py | 6 +- .../types/s3s_import_storage.py | 8 +- .../types/s3s_import_storage_status.py | 3 +- .../types/serialization_option.py | 4 +- .../types/serialization_options.py | 9 +- src/label_studio_sdk/types/task.py | 6 +- .../types/task_filter_options.py | 4 +- src/label_studio_sdk/types/user_simple.py | 4 +- src/label_studio_sdk/types/view.py | 6 +- src/label_studio_sdk/types/webhook.py | 6 +- .../types/webhook_serializer_for_update.py | 10 +- src/label_studio_sdk/types/workspace.py | 6 +- src/label_studio_sdk/users/__init__.py | 2 - src/label_studio_sdk/users/client.py | 437 ++++-- src/label_studio_sdk/users/raw_client.py | 833 ----------- 
src/label_studio_sdk/users/types/__init__.py | 2 - .../users/types/users_get_token_response.py | 4 +- .../users/types/users_reset_token_response.py | 4 +- src/label_studio_sdk/versions/__init__.py | 2 - src/label_studio_sdk/versions/client.py | 74 +- src/label_studio_sdk/versions/raw_client.py | 91 -- .../versions/types/__init__.py | 2 - .../versions/types/versions_get_response.py | 16 +- src/label_studio_sdk/views/__init__.py | 2 - src/label_studio_sdk/views/client.py | 322 ++++- src/label_studio_sdk/views/raw_client.py | 574 -------- src/label_studio_sdk/views/types/__init__.py | 2 - .../views/types/views_create_request_data.py | 6 +- .../views_create_request_data_filters.py | 14 +- ..._create_request_data_filters_items_item.py | 18 +- .../views/types/views_update_request_data.py | 6 +- .../views_update_request_data_filters.py | 14 +- ..._update_request_data_filters_items_item.py | 18 +- src/label_studio_sdk/webhooks/__init__.py | 2 - src/label_studio_sdk/webhooks/client.py | 413 ++++-- src/label_studio_sdk/webhooks/raw_client.py | 824 ----------- .../webhooks/types/__init__.py | 2 - src/label_studio_sdk/workspaces/__init__.py | 2 - src/label_studio_sdk/workspaces/client.py | 296 +++- .../workspaces/members/__init__.py | 2 - .../workspaces/members/client.py | 197 ++- .../workspaces/members/raw_client.py | 290 ---- .../workspaces/members/types/__init__.py | 2 - .../members/types/members_create_response.py | 4 +- .../types/members_list_response_item.py | 4 +- src/label_studio_sdk/workspaces/raw_client.py | 561 -------- tests/__init__.py | 2 + tests/conftest.py | 22 + tests/custom/test_client.py | 2 +- tests/export_storage/__init__.py | 2 + tests/export_storage/test_azure.py | 251 ++++ tests/export_storage/test_gcs.py | 251 ++++ tests/export_storage/test_local.py | 225 +++ tests/export_storage/test_redis.py | 261 ++++ tests/export_storage/test_s3.py | 291 ++++ tests/export_storage/test_s3s.py | 175 +++ tests/import_storage/__init__.py | 2 + 
tests/import_storage/test_azure.py | 269 ++++ tests/import_storage/test_gcs.py | 269 ++++ tests/import_storage/test_local.py | 219 +++ tests/import_storage/test_redis.py | 255 ++++ tests/import_storage/test_s3.py | 319 +++++ tests/import_storage/test_s3s.py | 329 +++++ tests/projects/__init__.py | 2 + tests/projects/test_exports.py | 265 ++++ tests/projects/test_pauses.py | 154 ++ tests/prompts/__init__.py | 2 + tests/prompts/test_indicators.py | 47 + tests/prompts/test_runs.py | 74 + tests/prompts/test_versions.py | 280 ++++ tests/test_actions.py | 65 + tests/test_annotations.py | 467 +++++++ tests/test_comments.py | 154 ++ tests/test_export_storage.py | 19 + tests/test_files.py | 78 ++ tests/test_import_storage.py | 19 + tests/test_jwt_settings.py | 44 + tests/test_ml.py | 229 +++ tests/test_model_providers.py | 194 +++ tests/test_predictions.py | 404 ++++++ tests/test_projects.py | 296 ++++ tests/test_prompts.py | 186 +++ tests/test_tasks.py | 353 +++++ tests/test_tokens.py | 66 + tests/test_users.py | 226 +++ tests/test_versions.py | 36 + tests/test_views.py | 178 +++ tests/test_workspaces.py | 154 ++ tests/utilities.py | 162 +++ tests/utils/assets/models/__init__.py | 2 +- tests/utils/assets/models/circle.py | 2 +- .../assets/models/object_with_defaults.py | 1 + .../models/object_with_optional_field.py | 9 +- tests/utils/assets/models/shape.py | 6 +- tests/utils/assets/models/square.py | 2 +- .../assets/models/undiscriminated_shape.py | 1 - tests/utils/test_http_client.py | 10 +- tests/utils/test_query_encoding.py | 18 +- tests/utils/test_serialization.py | 46 +- tests/workspaces/__init__.py | 2 + tests/workspaces/test_members.py | 42 + 342 files changed, 18109 insertions(+), 29949 deletions(-) delete mode 100644 src/label_studio_sdk/actions/raw_client.py delete mode 100644 src/label_studio_sdk/annotations/raw_client.py delete mode 100644 src/label_studio_sdk/comments/raw_client.py delete mode 100644 src/label_studio_sdk/core/force_multipart.py delete mode 
100644 src/label_studio_sdk/core/http_response.py delete mode 100644 src/label_studio_sdk/export_storage/azure/raw_client.py delete mode 100644 src/label_studio_sdk/export_storage/gcs/raw_client.py delete mode 100644 src/label_studio_sdk/export_storage/local/raw_client.py delete mode 100644 src/label_studio_sdk/export_storage/raw_client.py delete mode 100644 src/label_studio_sdk/export_storage/redis/raw_client.py delete mode 100644 src/label_studio_sdk/export_storage/s3/raw_client.py delete mode 100644 src/label_studio_sdk/export_storage/s3s/raw_client.py delete mode 100644 src/label_studio_sdk/files/raw_client.py delete mode 100644 src/label_studio_sdk/import_storage/azure/raw_client.py delete mode 100644 src/label_studio_sdk/import_storage/gcs/raw_client.py delete mode 100644 src/label_studio_sdk/import_storage/local/raw_client.py delete mode 100644 src/label_studio_sdk/import_storage/raw_client.py delete mode 100644 src/label_studio_sdk/import_storage/redis/raw_client.py delete mode 100644 src/label_studio_sdk/import_storage/s3/raw_client.py delete mode 100644 src/label_studio_sdk/import_storage/s3s/raw_client.py delete mode 100644 src/label_studio_sdk/jwt_settings/raw_client.py delete mode 100644 src/label_studio_sdk/ml/raw_client.py delete mode 100644 src/label_studio_sdk/model_providers/raw_client.py delete mode 100644 src/label_studio_sdk/predictions/raw_client.py delete mode 100644 src/label_studio_sdk/projects/exports/raw_client.py delete mode 100644 src/label_studio_sdk/projects/pauses/raw_client.py delete mode 100644 src/label_studio_sdk/projects/raw_client.py delete mode 100644 src/label_studio_sdk/prompts/indicators/raw_client.py delete mode 100644 src/label_studio_sdk/prompts/raw_client.py delete mode 100644 src/label_studio_sdk/prompts/runs/raw_client.py delete mode 100644 src/label_studio_sdk/prompts/versions/raw_client.py delete mode 100644 src/label_studio_sdk/tasks/raw_client.py delete mode 100644 src/label_studio_sdk/tokens/raw_client.py delete 
mode 100644 src/label_studio_sdk/users/raw_client.py delete mode 100644 src/label_studio_sdk/versions/raw_client.py delete mode 100644 src/label_studio_sdk/views/raw_client.py delete mode 100644 src/label_studio_sdk/webhooks/raw_client.py delete mode 100644 src/label_studio_sdk/workspaces/members/raw_client.py delete mode 100644 src/label_studio_sdk/workspaces/raw_client.py create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/export_storage/__init__.py create mode 100644 tests/export_storage/test_azure.py create mode 100644 tests/export_storage/test_gcs.py create mode 100644 tests/export_storage/test_local.py create mode 100644 tests/export_storage/test_redis.py create mode 100644 tests/export_storage/test_s3.py create mode 100644 tests/export_storage/test_s3s.py create mode 100644 tests/import_storage/__init__.py create mode 100644 tests/import_storage/test_azure.py create mode 100644 tests/import_storage/test_gcs.py create mode 100644 tests/import_storage/test_local.py create mode 100644 tests/import_storage/test_redis.py create mode 100644 tests/import_storage/test_s3.py create mode 100644 tests/import_storage/test_s3s.py create mode 100644 tests/projects/__init__.py create mode 100644 tests/projects/test_exports.py create mode 100644 tests/projects/test_pauses.py create mode 100644 tests/prompts/__init__.py create mode 100644 tests/prompts/test_indicators.py create mode 100644 tests/prompts/test_runs.py create mode 100644 tests/prompts/test_versions.py create mode 100644 tests/test_actions.py create mode 100644 tests/test_annotations.py create mode 100644 tests/test_comments.py create mode 100644 tests/test_export_storage.py create mode 100644 tests/test_files.py create mode 100644 tests/test_import_storage.py create mode 100644 tests/test_jwt_settings.py create mode 100644 tests/test_ml.py create mode 100644 tests/test_model_providers.py create mode 100644 tests/test_predictions.py create mode 100644 
tests/test_projects.py create mode 100644 tests/test_prompts.py create mode 100644 tests/test_tasks.py create mode 100644 tests/test_tokens.py create mode 100644 tests/test_users.py create mode 100644 tests/test_versions.py create mode 100644 tests/test_views.py create mode 100644 tests/test_workspaces.py create mode 100644 tests/utilities.py create mode 100644 tests/workspaces/__init__.py create mode 100644 tests/workspaces/test_members.py diff --git a/.gitignore b/.gitignore index d2e4ca808..0da665fee 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ +dist/ .mypy_cache/ -.ruff_cache/ __pycache__/ -dist/ poetry.toml +.ruff_cache/ diff --git a/poetry.lock b/poetry.lock index c6f1d19d3..3a545a803 100644 --- a/poetry.lock +++ b/poetry.lock @@ -880,55 +880,48 @@ files = [ [[package]] name = "mypy" -version = "1.13.0" +version = "1.0.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, + {file = 
"mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, + {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, + {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, + {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, + {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, + {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, + {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, + {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, + {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, + {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, + {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, + {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, + {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, + {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, + {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, + {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, + {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, + {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, + {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" +mypy-extensions = ">=0.4.3" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" +typing-extensions = ">=3.10" [package.extras] dmypy = ["psutil (>=4.0)"] -faster-cache = ["orjson"] install-types = ["pip"] -mypyc = ["setuptools (>=50)"] +python2 = ["typed-ast (>=1.4.0,<2)"] reports = 
["lxml"] [[package]] @@ -1872,29 +1865,29 @@ files = [ [[package]] name = "ruff" -version = "0.11.5" +version = "0.5.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"}, - {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"}, - {file = "ruff-0.11.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4bfd80a6ec559a5eeb96c33f832418bf0fb96752de0539905cf7b0cc1d31d779"}, - {file = "ruff-0.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0947c0a1afa75dcb5db4b34b070ec2bccee869d40e6cc8ab25aca11a7d527794"}, - {file = "ruff-0.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad871ff74b5ec9caa66cb725b85d4ef89b53f8170f47c3406e32ef040400b038"}, - {file = "ruff-0.11.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6cf918390cfe46d240732d4d72fa6e18e528ca1f60e318a10835cf2fa3dc19f"}, - {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56145ee1478582f61c08f21076dc59153310d606ad663acc00ea3ab5b2125f82"}, - {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5f66f8f1e8c9fc594cbd66fbc5f246a8d91f916cb9667e80208663ec3728304"}, - {file = "ruff-0.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80b4df4d335a80315ab9afc81ed1cff62be112bd165e162b5eed8ac55bfc8470"}, - {file = "ruff-0.11.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3068befab73620b8a0cc2431bd46b3cd619bc17d6f7695a3e1bb166b652c382a"}, - {file = "ruff-0.11.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f5da2e710a9641828e09aa98b92c9ebbc60518fdf3921241326ca3e8f8e55b8b"}, - {file = 
"ruff-0.11.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ef39f19cb8ec98cbc762344921e216f3857a06c47412030374fffd413fb8fd3a"}, - {file = "ruff-0.11.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2a7cedf47244f431fd11aa5a7e2806dda2e0c365873bda7834e8f7d785ae159"}, - {file = "ruff-0.11.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:81be52e7519f3d1a0beadcf8e974715b2dfc808ae8ec729ecfc79bddf8dbb783"}, - {file = "ruff-0.11.5-py3-none-win32.whl", hash = "sha256:e268da7b40f56e3eca571508a7e567e794f9bfcc0f412c4b607931d3af9c4afe"}, - {file = "ruff-0.11.5-py3-none-win_amd64.whl", hash = "sha256:6c6dc38af3cfe2863213ea25b6dc616d679205732dc0fb673356c2d69608f800"}, - {file = "ruff-0.11.5-py3-none-win_arm64.whl", hash = "sha256:67e241b4314f4eacf14a601d586026a962f4002a475aa702c69980a38087aa4e"}, - {file = "ruff-0.11.5.tar.gz", hash = "sha256:cae2e2439cb88853e421901ec040a758960b576126dab520fa08e9de431d1bef"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = 
"ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] [[package]] @@ -2271,4 +2264,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "6ee0e0fc89cb79dfb3ca7a88457b39073a0c3d009e618a1fc4f9bfabc0b0e89b" +content-hash = "9a44e720f99e6df236ed3540a290a268409c157b29fdab3a2d285d4dcac10429" diff --git a/pyproject.toml b/pyproject.toml index d7dce0254..e83291465 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,14 +56,14 @@ typing_extensions = ">= 4.0.0" ujson = 
">=5.8.0" xmljson = "0.2.1" -[tool.poetry.group.dev.dependencies] -mypy = "==1.13.0" +[tool.poetry.dev-dependencies] +mypy = "1.0.1" pytest = "^7.4.0" pytest-asyncio = "^0.23.5" python-dateutil = "^2.9.0" types-python-dateutil = "^2.9.0.20240316" respx = "^0.22.0" -ruff = "==0.11.5" +ruff = "^0.5.6" [tool.pytest.ini_options] testpaths = [ "tests" ] @@ -75,26 +75,6 @@ plugins = ["pydantic.mypy"] [tool.ruff] line-length = 120 -[tool.ruff.lint] -select = [ - "E", # pycodestyle errors - "F", # pyflakes - "I", # isort -] -ignore = [ - "E402", # Module level import not at top of file - "E501", # Line too long - "E711", # Comparison to `None` should be `cond is not None` - "E712", # Avoid equality comparisons to `True`; use `if ...:` checks - "E721", # Use `is` and `is not` for type comparisons, or `isinstance()` for insinstance checks - "E722", # Do not use bare `except` - "E731", # Do not assign a `lambda` expression, use a `def` - "F821", # Undefined name - "F841" # Local variable ... is assigned to but never used -] - -[tool.ruff.lint.isort] -section-order = ["future", "standard-library", "third-party", "first-party"] [build-system] requires = ["poetry-core"] diff --git a/requirements.txt b/requirements.txt index bc8667a07..80e1ba6d9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,16 +3,16 @@ appdirs>=1.4.3 datamodel-code-generator==0.26.1 httpx>=0.21.2 ijson>=3.2.3 -jsf==0.11.2 +jsf==^0.11.2 jsonschema>=4.23.0 lxml>=4.2.5 -nltk==3.9.1 +nltk==^3.9.1 numpy>=1.26.4,<3.0.0 -opencv-python==4.9.0 +opencv-python==^4.9.0 pandas>=0.24.0 pydantic>= 1.9.2 -pydantic-core==2.18.2 -pyjwt==2.10.1 +pydantic-core==^2.18.2 +pyjwt==^2.10.1 requests>=2.22.0 requests-mock==1.12.1 typing_extensions>= 4.0.0 diff --git a/src/label_studio_sdk/__init__.py b/src/label_studio_sdk/__init__.py index 3fb01bd96..2159010c2 100644 --- a/src/label_studio_sdk/__init__.py +++ b/src/label_studio_sdk/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import ( AccessTokenResponse, Annotation, diff --git a/src/label_studio_sdk/actions/__init__.py b/src/label_studio_sdk/actions/__init__.py index 574cdb2ce..99e4531ac 100644 --- a/src/label_studio_sdk/actions/__init__.py +++ b/src/label_studio_sdk/actions/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import ( ActionsCreateRequestFilters, ActionsCreateRequestFiltersConjunction, diff --git a/src/label_studio_sdk/actions/client.py b/src/label_studio_sdk/actions/client.py index 01f45e2ba..1624f4cdc 100644 --- a/src/label_studio_sdk/actions/client.py +++ b/src/label_studio_sdk/actions/client.py @@ -1,14 +1,18 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions -from .raw_client import AsyncRawActionsClient, RawActionsClient -from .types.actions_create_request_filters import ActionsCreateRequestFilters +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.actions_create_request_id import ActionsCreateRequestId +from .types.actions_create_request_filters import ActionsCreateRequestFilters +from .types.actions_create_request_selected_items import ( + ActionsCreateRequestSelectedItems, +) from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem -from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -16,18 +20,7 @@ class ActionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawActionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawActionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawActionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -51,8 +44,18 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> No ) client.actions.list() """ - _response = self._raw_client.list(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -128,32 +131,45 @@ def create( ordering=["tasks:total_annotations"], ) """ - _response = self._raw_client.create( - id=id, - project=project, - view=view, - filters=filters, - selected_items=selected_items, - ordering=ordering, + _response = self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="POST", + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, + annotation=ActionsCreateRequestFilters, + direction="write", + ), + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, + annotation=ActionsCreateRequestSelectedItems, + direction="write", + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return 
_response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncActionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawActionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawActionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawActionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -185,8 +201,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -270,13 +296,37 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - id=id, - project=project, - view=view, - filters=filters, - selected_items=selected_items, - ordering=ordering, + _response = await self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="POST", + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, + annotation=ActionsCreateRequestFilters, + direction="write", + ), + "selectedItems": 
convert_and_respect_annotation_metadata( + object_=selected_items, + annotation=ActionsCreateRequestSelectedItems, + direction="write", + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/actions/raw_client.py b/src/label_studio_sdk/actions/raw_client.py deleted file mode 100644 index 5c45e77ef..000000000 --- a/src/label_studio_sdk/actions/raw_client.py +++ /dev/null @@ -1,223 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.request_options import RequestOptions -from ..core.serialization import convert_and_respect_annotation_metadata -from .types.actions_create_request_filters import ActionsCreateRequestFilters -from .types.actions_create_request_id import ActionsCreateRequestId -from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem -from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawActionsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - Retrieve all the registered actions with descriptions that data manager can use. 
- - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - id: ActionsCreateRequestId, - project: int, - view: typing.Optional[int] = None, - filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, - selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, - ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` - - Parameters - ---------- - id : ActionsCreateRequestId - Action name ID, see the full list of actions in the `GET api/actions` request - - project : int - Project ID - - view : typing.Optional[int] - View ID (optional, it has higher priority than filters, selectedItems and ordering from the request body payload) - - filters : typing.Optional[ActionsCreateRequestFilters] - Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` - - selected_items : typing.Optional[ActionsCreateRequestSelectedItems] - Task selection by IDs. If filters are applied, the selection will be applied to the filtered tasks.If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` - - ordering : typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] - List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="POST", - params={ - "id": id, - "project": project, - "view": view, - }, - json={ - "filters": convert_and_respect_annotation_metadata( - object_=filters, annotation=ActionsCreateRequestFilters, direction="write" - ), - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" - ), - "ordering": ordering, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawActionsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[None]: - """ - Retrieve all the registered actions with descriptions that data manager can use. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - id: ActionsCreateRequestId, - project: int, - view: typing.Optional[int] = None, - filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, - selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, - ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` - - Parameters - ---------- - id : ActionsCreateRequestId - Action name ID, see the full list of actions in the `GET api/actions` request - - project : int - Project ID - - view : typing.Optional[int] - View ID (optional, it has higher priority than filters, selectedItems and ordering from the request body payload) - - filters : typing.Optional[ActionsCreateRequestFilters] - Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` - - selected_items : typing.Optional[ActionsCreateRequestSelectedItems] - Task selection by IDs. If filters are applied, the selection will be applied to the filtered tasks.If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` - - ordering : typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] - List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="POST", - params={ - "id": id, - "project": project, - "view": view, - }, - json={ - "filters": convert_and_respect_annotation_metadata( - object_=filters, annotation=ActionsCreateRequestFilters, direction="write" - ), - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" - ), - "ordering": ordering, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/actions/types/__init__.py b/src/label_studio_sdk/actions/types/__init__.py index 18d6b19ea..f44d52635 100644 --- a/src/label_studio_sdk/actions/types/__init__.py +++ b/src/label_studio_sdk/actions/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .actions_create_request_filters import ActionsCreateRequestFilters from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction from .actions_create_request_filters_items_item import ActionsCreateRequestFiltersItemsItem diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters.py b/src/label_studio_sdk/actions/types/actions_create_request_filters.py index 1a20a01c2..85e1e9ec9 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters.py @@ -1,11 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import typing - +from ...core.pydantic_utilities import UniversalBaseModel +from .actions_create_request_filters_conjunction import ( + ActionsCreateRequestFiltersConjunction, +) import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction -from .actions_create_request_filters_items_item import ActionsCreateRequestFiltersItemsItem +import typing +from .actions_create_request_filters_items_item import ( + ActionsCreateRequestFiltersItemsItem, +) +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py index c5442ec10..aa09d841e 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py @@ -1,12 +1,18 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing - +from ...core.pydantic_utilities import UniversalBaseModel +from .actions_create_request_filters_items_item_filter import ( + ActionsCreateRequestFiltersItemsItemFilter, +) import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .actions_create_request_filters_items_item_filter import ActionsCreateRequestFiltersItemsItemFilter -from .actions_create_request_filters_items_item_operator import ActionsCreateRequestFiltersItemsItemOperator -from .actions_create_request_filters_items_item_value import ActionsCreateRequestFiltersItemsItemValue +from .actions_create_request_filters_items_item_operator import ( + ActionsCreateRequestFiltersItemsItemOperator, +) +from .actions_create_request_filters_items_item_value import ( + ActionsCreateRequestFiltersItemsItemValue, +) +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class ActionsCreateRequestFiltersItemsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py index 2ceac5aac..88f37db12 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py @@ -1,9 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from .actions_create_request_selected_items_excluded import ActionsCreateRequestSelectedItemsExcluded -from .actions_create_request_selected_items_included import ActionsCreateRequestSelectedItemsIncluded +from .actions_create_request_selected_items_included import ( + ActionsCreateRequestSelectedItemsIncluded, +) +from .actions_create_request_selected_items_excluded import ( + ActionsCreateRequestSelectedItemsExcluded, +) ActionsCreateRequestSelectedItems = typing.Union[ ActionsCreateRequestSelectedItemsIncluded, ActionsCreateRequestSelectedItemsExcluded diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py index 89b9d4466..e1d15e854 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import typing - -import pydantic +from ...core.pydantic_utilities import UniversalBaseModel import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata +import pydantic +import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestSelectedItemsExcluded(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py index 7d943e6c5..90f2ec326 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing - -import pydantic +from ...core.pydantic_utilities import UniversalBaseModel import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata +import pydantic +import typing +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestSelectedItemsIncluded(UniversalBaseModel): diff --git a/src/label_studio_sdk/annotations/__init__.py b/src/label_studio_sdk/annotations/__init__.py index 51c6a9415..fb830a2b1 100644 --- a/src/label_studio_sdk/annotations/__init__.py +++ b/src/label_studio_sdk/annotations/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import AnnotationsCreateBulkRequestSelectedItems, AnnotationsCreateBulkResponseItem __all__ = ["AnnotationsCreateBulkRequestSelectedItems", "AnnotationsCreateBulkResponseItem"] diff --git a/src/label_studio_sdk/annotations/client.py b/src/label_studio_sdk/annotations/client.py index f6c692d23..61993721e 100644 --- a/src/label_studio_sdk/annotations/client.py +++ b/src/label_studio_sdk/annotations/client.py @@ -1,13 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.annotation import Annotation -from .raw_client import AsyncRawAnnotationsClient, RawAnnotationsClient -from .types.annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems -from .types.annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from .types.annotations_create_bulk_request_selected_items import ( + AnnotationsCreateBulkRequestSelectedItems, +) +from .types.annotations_create_bulk_response_item import ( + AnnotationsCreateBulkResponseItem, +) +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,18 +23,7 @@ class AnnotationsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawAnnotationsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawAnnotationsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawAnnotationsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ @@ -59,8 +56,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -94,8 +107,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -187,19 +210,38 @@ def update( ground_truth=True, ) """ - _response = self._raw_client.update( - id, - result=result, - task=task, - project=project, - completed_by=completed_by, - updated_by=updated_by, - was_cancelled=was_cancelled, - ground_truth=ground_truth, - 
lead_time=lead_time, + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Annotation]: """ @@ -232,8 +274,24 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.list(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -338,19 +396,38 @@ def create( ground_truth=True, ) """ - _response = self._raw_client.create( - id, - result=result, - task=task, - project=project, - completed_by=completed_by, - updated_by=updated_by, - 
was_cancelled=was_cancelled, - ground_truth=ground_truth, - lead_time=lead_time, + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="POST", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create_bulk( self, @@ -394,31 +471,44 @@ def create_bulk( ) client.annotations.create_bulk() """ - _response = self._raw_client.create_bulk( - tasks=tasks, - selected_items=selected_items, - lead_time=lead_time, - project=project, - result=result, + _response = self._client_wrapper.httpx_client.request( + "api/annotations/bulk", + method="POST", + json={ + "tasks": tasks, + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, + annotation=AnnotationsCreateBulkRequestSelectedItems, + direction="write", + ), + "lead_time": lead_time, + "project": project, + "result": result, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncAnnotationsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawAnnotationsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawAnnotationsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawAnnotationsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ @@ -459,8 +549,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -502,8 +608,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -603,19 +719,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - result=result, - task=task, - project=project, - completed_by=completed_by, - updated_by=updated_by, - was_cancelled=was_cancelled, - ground_truth=ground_truth, - lead_time=lead_time, + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -658,8 +793,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + 
except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -772,19 +923,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - id, - result=result, - task=task, - project=project, - completed_by=completed_by, - updated_by=updated_by, - was_cancelled=was_cancelled, - ground_truth=ground_truth, - lead_time=lead_time, + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="POST", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create_bulk( self, @@ -836,12 +1006,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create_bulk( - tasks=tasks, - selected_items=selected_items, - lead_time=lead_time, - project=project, - result=result, + _response = await self._client_wrapper.httpx_client.request( + "api/annotations/bulk", + method="POST", + json={ + "tasks": tasks, + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, + annotation=AnnotationsCreateBulkRequestSelectedItems, + direction="write", + ), + "lead_time": lead_time, + "project": project, + "result": result, + }, 
+ headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/annotations/raw_client.py b/src/label_studio_sdk/annotations/raw_client.py deleted file mode 100644 index e6e2a2650..000000000 --- a/src/label_studio_sdk/annotations/raw_client.py +++ /dev/null @@ -1,794 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..core.serialization import convert_and_respect_annotation_metadata -from ..types.annotation import Annotation -from .types.annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems -from .types.annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawAnnotationsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Annotation]: - """ - - Tasks can have multiple annotations. 
Use this call to retrieve a specific annotation using its ID. - - You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. - - Parameters - ---------- - id : int - A unique integer value identifying this annotation. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Annotation] - Retrieved annotation - """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete an annotation. - - This action can't be undone! - - You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. - - Parameters - ---------- - id : int - A unique integer value identifying this annotation. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - task: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - completed_by: typing.Optional[int] = OMIT, - updated_by: typing.Optional[int] = OMIT, - was_cancelled: typing.Optional[bool] = OMIT, - ground_truth: typing.Optional[bool] = OMIT, - lead_time: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Annotation]: - """ - - Update attributes for an existing annotation. - - You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. - - For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). - - Parameters - ---------- - id : int - A unique integer value identifying this annotation. - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) - - task : typing.Optional[int] - Corresponding task for this annotation - - project : typing.Optional[int] - Project ID for this annotation - - completed_by : typing.Optional[int] - User ID of the person who created this annotation - - updated_by : typing.Optional[int] - Last user who updated this annotation - - was_cancelled : typing.Optional[bool] - User skipped the task - - ground_truth : typing.Optional[bool] - This annotation is a Ground Truth - - lead_time : typing.Optional[float] - How much time it took to annotate the task (in seconds) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Annotation] - Updated annotation - """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def list( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[Annotation]]: - """ - - List all annotations for a task. - - You will need to supply the task ID. 
You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). - - Parameters - ---------- - id : int - Task ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[Annotation]] - Annotation - """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Annotation], - parse_obj_as( - type_=typing.List[Annotation], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - id: int, - *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - task: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - completed_by: typing.Optional[int] = OMIT, - updated_by: typing.Optional[int] = OMIT, - was_cancelled: typing.Optional[bool] = OMIT, - ground_truth: typing.Optional[bool] = OMIT, - lead_time: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Annotation]: - """ - - Add annotations to a task like an annotator does. - - You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). - - - The content of the result field depends on your labeling configuration. 
For example, send the following data as part of your POST - request to send an empty annotation with the ID of the user who completed the task: - - ```json - { - "result": {}, - "was_cancelled": true, - "ground_truth": true, - "lead_time": 0, - "task": 0 - "completed_by": 123 - } - ``` - - Parameters - ---------- - id : int - Task ID - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) - - task : typing.Optional[int] - Corresponding task for this annotation - - project : typing.Optional[int] - Project ID for this annotation - - completed_by : typing.Optional[int] - User ID of the person who created this annotation - - updated_by : typing.Optional[int] - Last user who updated this annotation - - was_cancelled : typing.Optional[bool] - User skipped the task - - ground_truth : typing.Optional[bool] - This annotation is a Ground Truth - - lead_time : typing.Optional[float] - How much time it took to annotate the task (in seconds) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Annotation] - Created annotation - """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="POST", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create_bulk( - self, - *, - tasks: typing.Optional[typing.Sequence[int]] = OMIT, - selected_items: typing.Optional[AnnotationsCreateBulkRequestSelectedItems] = OMIT, - lead_time: typing.Optional[float] = OMIT, - project: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[typing.List[AnnotationsCreateBulkResponseItem]]: - """ - Create multiple annotations for specific tasks in a bulk operation. - - Parameters - ---------- - tasks : typing.Optional[typing.Sequence[int]] - - selected_items : typing.Optional[AnnotationsCreateBulkRequestSelectedItems] - - lead_time : typing.Optional[float] - - project : typing.Optional[int] - - result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[AnnotationsCreateBulkResponseItem]] - Annotations created successfully - """ - _response = self._client_wrapper.httpx_client.request( - "api/annotations/bulk", - method="POST", - json={ - "tasks": tasks, - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" - ), - "lead_time": lead_time, - "project": project, - "result": result, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[AnnotationsCreateBulkResponseItem], - parse_obj_as( - type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawAnnotationsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Annotation]: - """ - - Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID. - - You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. - - Parameters - ---------- - id : int - A unique integer value identifying this annotation. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Annotation] - Retrieved annotation - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete an annotation. - - This action can't be undone! - - You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. - - Parameters - ---------- - id : int - A unique integer value identifying this annotation. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - task: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - completed_by: typing.Optional[int] = OMIT, - updated_by: typing.Optional[int] = OMIT, - was_cancelled: typing.Optional[bool] = OMIT, - ground_truth: typing.Optional[bool] = OMIT, - lead_time: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Annotation]: - """ - - Update attributes for an existing annotation. - - You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. - - For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). - - Parameters - ---------- - id : int - A unique integer value identifying this annotation. - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) - - task : typing.Optional[int] - Corresponding task for this annotation - - project : typing.Optional[int] - Project ID for this annotation - - completed_by : typing.Optional[int] - User ID of the person who created this annotation - - updated_by : typing.Optional[int] - Last user who updated this annotation - - was_cancelled : typing.Optional[bool] - User skipped the task - - ground_truth : typing.Optional[bool] - This annotation is a Ground Truth - - lead_time : typing.Optional[float] - How much time it took to annotate the task (in seconds) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[Annotation] - Updated annotation - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def list( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[Annotation]]: - """ - - List all annotations for a task. 
- - You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). - - Parameters - ---------- - id : int - Task ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[Annotation]] - Annotation - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Annotation], - parse_obj_as( - type_=typing.List[Annotation], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - id: int, - *, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - task: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - completed_by: typing.Optional[int] = OMIT, - updated_by: typing.Optional[int] = OMIT, - was_cancelled: typing.Optional[bool] = OMIT, - ground_truth: typing.Optional[bool] = OMIT, - lead_time: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Annotation]: - """ - - Add annotations to a task like an annotator does. - - You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). 
- - - The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST - request to send an empty annotation with the ID of the user who completed the task: - - ```json - { - "result": {}, - "was_cancelled": true, - "ground_truth": true, - "lead_time": 0, - "task": 0 - "completed_by": 123 - } - ``` - - Parameters - ---------- - id : int - Task ID - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) - - task : typing.Optional[int] - Corresponding task for this annotation - - project : typing.Optional[int] - Project ID for this annotation - - completed_by : typing.Optional[int] - User ID of the person who created this annotation - - updated_by : typing.Optional[int] - Last user who updated this annotation - - was_cancelled : typing.Optional[bool] - User skipped the task - - ground_truth : typing.Optional[bool] - This annotation is a Ground Truth - - lead_time : typing.Optional[float] - How much time it took to annotate the task (in seconds) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Annotation] - Created annotation - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="POST", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create_bulk( - self, - *, - tasks: typing.Optional[typing.Sequence[int]] = OMIT, - selected_items: typing.Optional[AnnotationsCreateBulkRequestSelectedItems] = OMIT, - lead_time: typing.Optional[float] = OMIT, - project: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[typing.List[AnnotationsCreateBulkResponseItem]]: - """ - Create multiple annotations for specific tasks in a bulk operation. - - Parameters - ---------- - tasks : typing.Optional[typing.Sequence[int]] - - selected_items : typing.Optional[AnnotationsCreateBulkRequestSelectedItems] - - lead_time : typing.Optional[float] - - project : typing.Optional[int] - - result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[AnnotationsCreateBulkResponseItem]] - Annotations created successfully - """ - _response = await self._client_wrapper.httpx_client.request( - "api/annotations/bulk", - method="POST", - json={ - "tasks": tasks, - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" - ), - "lead_time": lead_time, - "project": project, - "result": result, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[AnnotationsCreateBulkResponseItem], - parse_obj_as( - type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/annotations/types/__init__.py b/src/label_studio_sdk/annotations/types/__init__.py index 4bacc3ec7..0ec046f76 100644 --- a/src/label_studio_sdk/annotations/types/__init__.py +++ b/src/label_studio_sdk/annotations/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems from .annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem diff --git a/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py b/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py index 5a1c02d68..c3209d6c3 100644 --- a/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py +++ b/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import typing - -import pydantic +from ...core.pydantic_utilities import UniversalBaseModel import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import typing from ...core.serialization import FieldMetadata +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class AnnotationsCreateBulkRequestSelectedItems(UniversalBaseModel): diff --git a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py index 856097dc3..a023f4f3d 100644 --- a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +++ b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - +from ...core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AnnotationsCreateBulkResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/base_client.py b/src/label_studio_sdk/base_client.py index c5e01f8ba..ade8eacb8 100644 --- a/src/label_studio_sdk/base_client.py +++ b/src/label_studio_sdk/base_client.py @@ -1,31 +1,50 @@ # This file was auto-generated by Fern from our API Definition. -import os import typing - +from .environment import LabelStudioEnvironment +import os import httpx -from .actions.client import ActionsClient, AsyncActionsClient -from .annotations.client import AnnotationsClient, AsyncAnnotationsClient -from .comments.client import AsyncCommentsClient, CommentsClient from .core.api_error import ApiError -from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .environment import LabelStudioEnvironment -from .export_storage.client import AsyncExportStorageClient, ExportStorageClient -from .files.client import AsyncFilesClient, FilesClient -from .import_storage.client import AsyncImportStorageClient, ImportStorageClient -from .jwt_settings.client import AsyncJwtSettingsClient, JwtSettingsClient -from .ml.client import AsyncMlClient, MlClient -from .model_providers.client import AsyncModelProvidersClient, ModelProvidersClient -from .predictions.client import AsyncPredictionsClient, PredictionsClient -from .projects.client import AsyncProjectsClient, ProjectsClient -from .prompts.client import AsyncPromptsClient, PromptsClient -from .tasks.client import AsyncTasksClient, TasksClient -from .tokens.client import AsyncTokensClient, TokensClient -from .users.client import AsyncUsersClient, UsersClient -from .versions.client import AsyncVersionsClient, VersionsClient -from .views.client import AsyncViewsClient, ViewsClient -from .webhooks.client import 
AsyncWebhooksClient, WebhooksClient -from .workspaces.client import AsyncWorkspacesClient, WorkspacesClient +from .core.client_wrapper import SyncClientWrapper +from .annotations.client import AnnotationsClient +from .users.client import UsersClient +from .actions.client import ActionsClient +from .views.client import ViewsClient +from .files.client import FilesClient +from .ml.client import MlClient +from .predictions.client import PredictionsClient +from .projects.client import ProjectsClient +from .tasks.client import TasksClient +from .import_storage.client import ImportStorageClient +from .export_storage.client import ExportStorageClient +from .webhooks.client import WebhooksClient +from .versions.client import VersionsClient +from .prompts.client import PromptsClient +from .model_providers.client import ModelProvidersClient +from .comments.client import CommentsClient +from .workspaces.client import WorkspacesClient +from .tokens.client import TokensClient +from .jwt_settings.client import JwtSettingsClient +from .core.client_wrapper import AsyncClientWrapper +from .annotations.client import AsyncAnnotationsClient +from .users.client import AsyncUsersClient +from .actions.client import AsyncActionsClient +from .views.client import AsyncViewsClient +from .files.client import AsyncFilesClient +from .ml.client import AsyncMlClient +from .predictions.client import AsyncPredictionsClient +from .projects.client import AsyncProjectsClient +from .tasks.client import AsyncTasksClient +from .import_storage.client import AsyncImportStorageClient +from .export_storage.client import AsyncExportStorageClient +from .webhooks.client import AsyncWebhooksClient +from .versions.client import AsyncVersionsClient +from .prompts.client import AsyncPromptsClient +from .model_providers.client import AsyncModelProvidersClient +from .comments.client import AsyncCommentsClient +from .workspaces.client import AsyncWorkspacesClient +from .tokens.client import AsyncTokensClient +from 
.jwt_settings.client import AsyncJwtSettingsClient class LabelStudioBase: diff --git a/src/label_studio_sdk/comments/__init__.py b/src/label_studio_sdk/comments/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/comments/__init__.py +++ b/src/label_studio_sdk/comments/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/comments/client.py b/src/label_studio_sdk/comments/client.py index 2a30a7ef3..dec7e7a72 100644 --- a/src/label_studio_sdk/comments/client.py +++ b/src/label_studio_sdk/comments/client.py @@ -1,11 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.comment import Comment -from .raw_client import AsyncRawCommentsClient, RawCommentsClient +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.jsonable_encoder import jsonable_encoder +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -13,18 +16,7 @@ class CommentsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawCommentsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawCommentsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawCommentsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( self, @@ -66,10 +58,29 @@ def list( ) client.comments.list() """ - _response = self._raw_client.list( - project=project, expand_created_by=expand_created_by, annotation=annotation, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + "api/comments/", + method="GET", + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -111,10 +122,34 @@ def create( ) client.comments.create() """ - _response = self._raw_client.create( - annotation=annotation, project=project, text=text, is_resolved=is_resolved, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + "api/comments/", + method="POST", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> Comment: """ @@ -145,8 +180,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -176,8 +227,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -225,31 +286,39 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - annotation=annotation, - project=project, - text=text, - is_resolved=is_resolved, + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="PATCH", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + 
}, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncCommentsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawCommentsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawCommentsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawCommentsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( self, @@ -299,10 +368,29 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list( - project=project, expand_created_by=expand_created_by, annotation=annotation, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + "api/comments/", + method="GET", + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -352,10 +440,34 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - annotation=annotation, project=project, text=text, 
is_resolved=is_resolved, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + "api/comments/", + method="POST", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Comment: """ @@ -394,8 +506,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -433,8 +561,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + 
request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -490,12 +628,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - annotation=annotation, - project=project, - text=text, - is_resolved=is_resolved, + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="PATCH", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/comments/raw_client.py b/src/label_studio_sdk/comments/raw_client.py deleted file mode 100644 index c60535a93..000000000 --- a/src/label_studio_sdk/comments/raw_client.py +++ /dev/null @@ -1,529 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.comment import Comment - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawCommentsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, - *, - project: typing.Optional[int] = None, - expand_created_by: typing.Optional[bool] = None, - annotation: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[typing.List[Comment]]: - """ - - Get a list of comments for a specific project. - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - expand_created_by : typing.Optional[bool] - Expand the created_by field with object instead of ID - - annotation : typing.Optional[int] - Annotation ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[Comment]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/comments/", - method="GET", - params={ - "project": project, - "expand_created_by": expand_created_by, - "annotation": annotation, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Comment], - parse_obj_as( - type_=typing.List[Comment], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - annotation: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - text: typing.Optional[str] = OMIT, - is_resolved: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Comment]: - """ - - Create a new comment. - - Parameters - ---------- - annotation : typing.Optional[int] - - project : typing.Optional[int] - - text : typing.Optional[str] - - is_resolved : typing.Optional[bool] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Comment] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/comments/", - method="POST", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Comment]: - """ - - Get a specific comment. - - Parameters - ---------- - id : int - Comment ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Comment] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific comment. 
- - Parameters - ---------- - id : int - Comment ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - annotation: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - text: typing.Optional[str] = OMIT, - is_resolved: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Comment]: - """ - - Update a specific comment. - - Parameters - ---------- - id : int - Comment ID - - annotation : typing.Optional[int] - - project : typing.Optional[int] - - text : typing.Optional[str] - - is_resolved : typing.Optional[bool] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Comment] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="PATCH", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawCommentsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, - *, - project: typing.Optional[int] = None, - expand_created_by: typing.Optional[bool] = None, - annotation: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[typing.List[Comment]]: - """ - - Get a list of comments for a specific project. - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - expand_created_by : typing.Optional[bool] - Expand the created_by field with object instead of ID - - annotation : typing.Optional[int] - Annotation ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[Comment]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/comments/", - method="GET", - params={ - "project": project, - "expand_created_by": expand_created_by, - "annotation": annotation, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Comment], - parse_obj_as( - type_=typing.List[Comment], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - annotation: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - text: typing.Optional[str] = OMIT, - is_resolved: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Comment]: - """ - - Create a new comment. - - Parameters - ---------- - annotation : typing.Optional[int] - - project : typing.Optional[int] - - text : typing.Optional[str] - - is_resolved : typing.Optional[bool] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Comment] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/comments/", - method="POST", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Comment]: - """ - - Get a specific comment. - - Parameters - ---------- - id : int - Comment ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Comment] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific comment. - - Parameters - ---------- - id : int - Comment ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - annotation: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - text: typing.Optional[str] = OMIT, - is_resolved: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Comment]: - """ - - Update a specific comment. 
- - Parameters - ---------- - id : int - Comment ID - - annotation : typing.Optional[int] - - project : typing.Optional[int] - - text : typing.Optional[str] - - is_resolved : typing.Optional[bool] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[Comment] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="PATCH", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/core/__init__.py b/src/label_studio_sdk/core/__init__.py index d1461de7c..42031ad0b 100644 --- a/src/label_studio_sdk/core/__init__.py +++ b/src/label_studio_sdk/core/__init__.py @@ -1,13 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .api_error import ApiError from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper from .datetime_utils import serialize_datetime from .file import File, convert_file_dict_to_httpx_tuples, with_content_type from .http_client import AsyncHttpClient, HttpClient -from .http_response import AsyncHttpResponse, HttpResponse from .jsonable_encoder import jsonable_encoder from .pagination import AsyncPager, SyncPager from .pydantic_utilities import ( @@ -28,13 +25,11 @@ "ApiError", "AsyncClientWrapper", "AsyncHttpClient", - "AsyncHttpResponse", "AsyncPager", "BaseClientWrapper", "FieldMetadata", "File", "HttpClient", - "HttpResponse", "IS_PYDANTIC_V2", "RequestOptions", "SyncClientWrapper", diff --git a/src/label_studio_sdk/core/api_error.py b/src/label_studio_sdk/core/api_error.py index 6f850a60c..2e9fc5431 100644 --- a/src/label_studio_sdk/core/api_error.py +++ b/src/label_studio_sdk/core/api_error.py @@ -1,23 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-from typing import Any, Dict, Optional +import typing class ApiError(Exception): - headers: Optional[Dict[str, str]] - status_code: Optional[int] - body: Any + status_code: typing.Optional[int] + body: typing.Any - def __init__( - self, - *, - headers: Optional[Dict[str, str]] = None, - status_code: Optional[int] = None, - body: Any = None, - ) -> None: - self.headers = headers + def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None): self.status_code = status_code self.body = body def __str__(self) -> str: - return f"headers: {self.headers}, status_code: {self.status_code}, body: {self.body}" + return f"status_code: {self.status_code}, body: {self.body}" diff --git a/src/label_studio_sdk/core/file.py b/src/label_studio_sdk/core/file.py index 44b0d27c0..c6d11fc70 100644 --- a/src/label_studio_sdk/core/file.py +++ b/src/label_studio_sdk/core/file.py @@ -58,7 +58,8 @@ def with_content_type(*, file: File, default_content_type: str) -> File: return (filename, content, out_content_type) elif len(file) == 4: filename, content, file_content_type, headers = cast( # type: ignore - Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], + file, ) out_content_type = file_content_type or default_content_type return (filename, content, out_content_type, headers) diff --git a/src/label_studio_sdk/core/force_multipart.py b/src/label_studio_sdk/core/force_multipart.py deleted file mode 100644 index ae24ccff1..000000000 --- a/src/label_studio_sdk/core/force_multipart.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - - -class ForceMultipartDict(dict): - """ - A dictionary subclass that always evaluates to True in boolean contexts. - - This is used to force multipart/form-data encoding in HTTP requests even when - the dictionary is empty, which would normally evaluate to False. 
- """ - - def __bool__(self): - return True - - -FORCE_MULTIPART = ForceMultipartDict() diff --git a/src/label_studio_sdk/core/http_client.py b/src/label_studio_sdk/core/http_client.py index e4173f990..275a54cc8 100644 --- a/src/label_studio_sdk/core/http_client.py +++ b/src/label_studio_sdk/core/http_client.py @@ -2,6 +2,7 @@ import asyncio import email.utils +import json import re import time import typing @@ -10,13 +11,12 @@ from random import random import httpx + from .file import File, convert_file_dict_to_httpx_tuples -from .force_multipart import FORCE_MULTIPART from .jsonable_encoder import jsonable_encoder from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions -from httpx._types import RequestFiles INITIAL_RETRY_DELAY_SECONDS = 0.5 MAX_RETRY_DELAY_SECONDS = 10 @@ -180,17 +180,11 @@ def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[ - typing.Union[ - typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], - typing.List[typing.Tuple[str, File]], - ] - ] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, - force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -201,15 +195,6 @@ def request( json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) - request_files: typing.Optional[RequestFiles] = ( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is 
not omit and isinstance(files, dict)) - else None - ) - - if (request_files is None or len(request_files) == 0) and force_multipart: - request_files = FORCE_MULTIPART - response = self.httpx_client.request( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -242,7 +227,11 @@ def request( json=json_body, data=data_body, content=content, - files=request_files, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit) + else None + ), timeout=timeout, ) @@ -277,17 +266,11 @@ def stream( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[ - typing.Union[ - typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], - typing.List[typing.Tuple[str, File]], - ] - ] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, - force_multipart: typing.Optional[bool] = None, ) -> typing.Iterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -296,15 +279,6 @@ def stream( else self.base_timeout() ) - request_files: typing.Optional[RequestFiles] = ( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit and isinstance(files, dict)) - else None - ) - - if (request_files is None or len(request_files) == 0) and force_multipart: - request_files = FORCE_MULTIPART - json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) with self.httpx_client.stream( @@ -339,7 +313,11 @@ def stream( json=json_body, 
data=data_body, content=content, - files=request_files, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit) + else None + ), timeout=timeout, ) as stream: yield stream @@ -378,17 +356,11 @@ async def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[ - typing.Union[ - typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], - typing.List[typing.Tuple[str, File]], - ] - ] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, - force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -397,15 +369,6 @@ async def request( else self.base_timeout() ) - request_files: typing.Optional[RequestFiles] = ( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit and isinstance(files, dict)) - else None - ) - - if (request_files is None or len(request_files) == 0) and force_multipart: - request_files = FORCE_MULTIPART - json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) # Add the input to each of these and do None-safety checks @@ -441,7 +404,11 @@ async def request( json=json_body, data=data_body, content=content, - files=request_files, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if files is not None + else None + ), timeout=timeout, ) @@ -475,17 +442,11 @@ async def stream( json: 
typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[ - typing.Union[ - typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], - typing.List[typing.Tuple[str, File]], - ] - ] = None, + files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, - force_multipart: typing.Optional[bool] = None, ) -> typing.AsyncIterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -494,15 +455,6 @@ async def stream( else self.base_timeout() ) - request_files: typing.Optional[RequestFiles] = ( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit and isinstance(files, dict)) - else None - ) - - if (request_files is None or len(request_files) == 0) and force_multipart: - request_files = FORCE_MULTIPART - json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) async with self.httpx_client.stream( @@ -537,7 +489,11 @@ async def stream( json=json_body, data=data_body, content=content, - files=request_files, + files=( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if files is not None + else None + ), timeout=timeout, ) as stream: yield stream diff --git a/src/label_studio_sdk/core/http_response.py b/src/label_studio_sdk/core/http_response.py deleted file mode 100644 index 48a1798a5..000000000 --- a/src/label_studio_sdk/core/http_response.py +++ /dev/null @@ -1,55 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from typing import Dict, Generic, TypeVar - -import httpx - -T = TypeVar("T") -"""Generic to represent the underlying type of the data wrapped by the HTTP response.""" - - -class BaseHttpResponse: - """Minimalist HTTP response wrapper that exposes response headers.""" - - _response: httpx.Response - - def __init__(self, response: httpx.Response): - self._response = response - - @property - def headers(self) -> Dict[str, str]: - return dict(self._response.headers) - - -class HttpResponse(Generic[T], BaseHttpResponse): - """HTTP response wrapper that exposes response headers and data.""" - - _data: T - - def __init__(self, response: httpx.Response, data: T): - super().__init__(response) - self._data = data - - @property - def data(self) -> T: - return self._data - - def close(self) -> None: - self._response.close() - - -class AsyncHttpResponse(Generic[T], BaseHttpResponse): - """HTTP response wrapper that exposes response headers and data.""" - - _data: T - - def __init__(self, response: httpx.Response, data: T): - super().__init__(response) - self._data = data - - @property - def data(self) -> T: - return self._data - - async def close(self) -> None: - await self._response.aclose() diff --git a/src/label_studio_sdk/core/jsonable_encoder.py b/src/label_studio_sdk/core/jsonable_encoder.py index afee3662d..1b631e901 100644 --- a/src/label_studio_sdk/core/jsonable_encoder.py +++ b/src/label_studio_sdk/core/jsonable_encoder.py @@ -17,6 +17,7 @@ from typing import Any, Callable, Dict, List, Optional, Set, Union import pydantic + from .datetime_utils import serialize_datetime from .pydantic_utilities import ( IS_PYDANTIC_V2, diff --git a/src/label_studio_sdk/core/pagination.py b/src/label_studio_sdk/core/pagination.py index 209a1ff14..74f8ae61c 100644 --- a/src/label_studio_sdk/core/pagination.py +++ b/src/label_studio_sdk/core/pagination.py @@ -1,14 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from __future__ import annotations +import typing -from dataclasses import dataclass -from typing import AsyncIterator, Awaitable, Callable, Generic, Iterator, List, Optional, TypeVar +from typing_extensions import Self -from .http_response import BaseHttpResponse +import pydantic -T = TypeVar("T") -"""Generic to represent the underlying type of the results within a page""" +# Generic to represent the underlying type of the results within a page +T = typing.TypeVar("T") # SDKs implement a Page ABC per-pagination request, the endpoint then returns a pager that wraps this type @@ -20,63 +19,70 @@ # # This should be the outer function that returns the SyncPager again # get_next=lambda: list(..., cursor: response.cursor) (or list(..., offset: offset + 1)) # ) +class BasePage(pydantic.BaseModel, typing.Generic[T]): + has_next: bool + items: typing.Optional[typing.List[T]] -@dataclass(frozen=True) -class SyncPager(Generic[T]): - get_next: Optional[Callable[[], Optional[SyncPager[T]]]] - has_next: bool - items: Optional[List[T]] - response: Optional[BaseHttpResponse] +class SyncPage(BasePage[T], typing.Generic[T]): + get_next: typing.Optional[typing.Callable[[], typing.Optional[Self]]] + + +class AsyncPage(BasePage[T], typing.Generic[T]): + get_next: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Optional[Self]]]] + +# ---------------------------- + + +class SyncPager(SyncPage[T], typing.Generic[T]): # Here we type ignore the iterator to avoid a mypy error # caused by the type conflict with Pydanitc's __iter__ method # brought in by extending the base model - def __iter__(self) -> Iterator[T]: # type: ignore[override] + def __iter__(self) -> typing.Iterator[T]: # type: ignore for page in self.iter_pages(): if page.items is not None: - yield from page.items - - def iter_pages(self) -> Iterator[SyncPager[T]]: - page: Optional[SyncPager[T]] = self - while page is not None: - yield page - - if not page.has_next or page.get_next is None: - return + for item in 
page.items: + yield item - page = page.get_next() - if page is None or page.items is None or len(page.items) == 0: + def iter_pages(self) -> typing.Iterator[SyncPage[T]]: + page: typing.Union[SyncPager[T], None] = self + while True: + if page is not None: + yield page + if page.has_next and page.get_next is not None: + page = page.get_next() + if page is None or page.items is None or len(page.items) == 0: + return + else: + return + else: return - def next_page(self) -> Optional[SyncPager[T]]: + def next_page(self) -> typing.Optional[SyncPage[T]]: return self.get_next() if self.get_next is not None else None -@dataclass(frozen=True) -class AsyncPager(Generic[T]): - get_next: Optional[Callable[[], Awaitable[Optional[AsyncPager[T]]]]] - has_next: bool - items: Optional[List[T]] - response: Optional[BaseHttpResponse] - - async def __aiter__(self) -> AsyncIterator[T]: +class AsyncPager(AsyncPage[T], typing.Generic[T]): + async def __aiter__(self) -> typing.AsyncIterator[T]: # type: ignore async for page in self.iter_pages(): if page.items is not None: for item in page.items: yield item - async def iter_pages(self) -> AsyncIterator[AsyncPager[T]]: - page: Optional[AsyncPager[T]] = self - while page is not None: - yield page - - if not page.has_next or page.get_next is None: - return - - page = await page.get_next() - if page is None or page.items is None or len(page.items) == 0: + async def iter_pages(self) -> typing.AsyncIterator[AsyncPage[T]]: + page: typing.Union[AsyncPager[T], None] = self + while True: + if page is not None: + yield page + if page is not None and page.has_next and page.get_next is not None: + page = await page.get_next() + if page is None or page.items is None or len(page.items) == 0: + return + else: + return + else: return - async def next_page(self) -> Optional[AsyncPager[T]]: + async def next_page(self) -> typing.Optional[AsyncPage[T]]: return await self.get_next() if self.get_next is not None else None diff --git 
a/src/label_studio_sdk/core/pydantic_utilities.py b/src/label_studio_sdk/core/pydantic_utilities.py index 0360ef49a..93a9d7867 100644 --- a/src/label_studio_sdk/core/pydantic_utilities.py +++ b/src/label_studio_sdk/core/pydantic_utilities.py @@ -2,65 +2,89 @@ # nopycln: file import datetime as dt +import typing from collections import defaultdict -from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast + +import typing_extensions import pydantic +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata + IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") if IS_PYDANTIC_V2: - from pydantic.v1.datetime_parse import parse_date as parse_date - from pydantic.v1.datetime_parse import parse_datetime as parse_datetime - from pydantic.v1.fields import ModelField as ModelField - from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined] - from pydantic.v1.typing import get_args as get_args - from pydantic.v1.typing import get_origin as get_origin - from pydantic.v1.typing import is_literal_type as is_literal_type - from pydantic.v1.typing import is_union as is_union + # isort will try to reformat the comments on these imports, which breaks mypy + # isort: off + from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_date as parse_date, + ) + from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_datetime as parse_datetime, + ) + from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + ENCODERS_BY_TYPE as encoders_by_type, + ) + from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + get_args as get_args, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + get_origin as get_origin, + ) 
+ from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_literal_type as is_literal_type, + ) + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_union as is_union, + ) + from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 else: - from pydantic.datetime_parse import parse_date as parse_date # type: ignore[no-redef] - from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore[no-redef] - from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined, no-redef] - from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[no-redef] - from pydantic.typing import get_args as get_args # type: ignore[no-redef] - from pydantic.typing import get_origin as get_origin # type: ignore[no-redef] - from pydantic.typing import is_literal_type as is_literal_type # type: ignore[no-redef] - from pydantic.typing import is_union as is_union # type: ignore[no-redef] + from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 + from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 + from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 + from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 + from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 + from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 + + # isort: on -from .datetime_utils import serialize_datetime -from .serialization import convert_and_respect_annotation_metadata -from typing_extensions import TypeAlias -T = TypeVar("T") -Model = TypeVar("Model", 
bound=pydantic.BaseModel) +T = typing.TypeVar("T") +Model = typing.TypeVar("Model", bound=pydantic.BaseModel) -def parse_obj_as(type_: Type[T], object_: Any) -> T: +def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read") if IS_PYDANTIC_V2: - adapter = pydantic.TypeAdapter(type_) # type: ignore[attr-defined] + adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2 return adapter.validate_python(dealiased_object) - return pydantic.parse_obj_as(type_, dealiased_object) + else: + return pydantic.parse_obj_as(type_, dealiased_object) -def to_jsonable_with_fallback(obj: Any, fallback_serializer: Callable[[Any], Any]) -> Any: +def to_jsonable_with_fallback( + obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any] +) -> typing.Any: if IS_PYDANTIC_V2: from pydantic_core import to_jsonable_python return to_jsonable_python(obj, fallback=fallback_serializer) - return fallback_serializer(obj) + else: + return fallback_serializer(obj) class UniversalBaseModel(pydantic.BaseModel): if IS_PYDANTIC_V2: - model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( # type: ignore[typeddict-unknown-key] + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( # Allow fields beginning with `model_` to be used in the model protected_namespaces=(), - ) + ) # type: ignore # Pydantic v2 - @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore[attr-defined] - def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> Any: # type: ignore[name-defined] + @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore # Pydantic v2 + def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any: # type: ignore # Pydantic v2 serialized = handler(self) data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in 
serialized.items()} return data @@ -72,28 +96,38 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} @classmethod - def model_construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": + def model_construct( + cls: typing.Type["Model"], + _fields_set: typing.Optional[typing.Set[str]] = None, + **values: typing.Any, + ) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") return cls.construct(_fields_set, **dealiased_object) @classmethod - def construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": + def construct( + cls: typing.Type["Model"], + _fields_set: typing.Optional[typing.Set[str]] = None, + **values: typing.Any, + ) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") if IS_PYDANTIC_V2: - return super().model_construct(_fields_set, **dealiased_object) # type: ignore[misc] - return super().construct(_fields_set, **dealiased_object) + return super().model_construct(_fields_set, **dealiased_object) # type: ignore # Pydantic v2 + else: + return super().construct(_fields_set, **dealiased_object) - def json(self, **kwargs: Any) -> str: - kwargs_with_defaults = { + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { "by_alias": True, "exclude_unset": True, **kwargs, } if IS_PYDANTIC_V2: - return super().model_dump_json(**kwargs_with_defaults) # type: ignore[misc] - return super().json(**kwargs_with_defaults) + return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2 + else: + return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: Any) -> Dict[str, Any]: + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: """ Override the default dict method to `exclude_unset` by default. 
This function patches `exclude_unset` to work include fields within non-None default values. @@ -104,21 +138,21 @@ def dict(self, **kwargs: Any) -> Dict[str, Any]: # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models # that we have less control over, and this is less intrusive than custom serializers for now. if IS_PYDANTIC_V2: - kwargs_with_defaults_exclude_unset = { + kwargs_with_defaults_exclude_unset: typing.Any = { **kwargs, "by_alias": True, "exclude_unset": True, "exclude_none": False, } - kwargs_with_defaults_exclude_none = { + kwargs_with_defaults_exclude_none: typing.Any = { **kwargs, "by_alias": True, "exclude_none": True, "exclude_unset": False, } dict_dump = deep_union_pydantic_dicts( - super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore[misc] - super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore[misc] + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2 + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2 ) else: @@ -138,7 +172,7 @@ def dict(self, **kwargs: Any) -> Dict[str, Any]: if default is not None: self.__fields_set__.add(name) - kwargs_with_defaults_exclude_unset_include_fields = { + kwargs_with_defaults_exclude_unset_include_fields: typing.Any = { "by_alias": True, "exclude_unset": True, "include": _fields_set, @@ -150,10 +184,12 @@ def dict(self, **kwargs: Any) -> Dict[str, Any]: return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write") -def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]: - converted_list: List[Any] = [] +def _union_list_of_pydantic_dicts( + source: typing.List[typing.Any], destination: typing.List[typing.Any] +) -> typing.List[typing.Any]: + converted_list: typing.List[typing.Any] = [] for i, item in enumerate(source): - destination_value = destination[i] + destination_value = 
destination[i] # type: ignore if isinstance(item, dict): converted_list.append(deep_union_pydantic_dicts(item, destination_value)) elif isinstance(item, list): @@ -163,7 +199,9 @@ def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> return converted_list -def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any]) -> Dict[str, Any]: +def deep_union_pydantic_dicts( + source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any] +) -> typing.Dict[str, typing.Any]: for key, value in source.items(): node = destination.setdefault(key, {}) if isinstance(value, dict): @@ -181,16 +219,18 @@ def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any if IS_PYDANTIC_V2: - class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[misc, name-defined, type-arg] + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 pass - UniversalRootModel: TypeAlias = V2RootModel # type: ignore[misc] + UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore else: - UniversalRootModel: TypeAlias = UniversalBaseModel # type: ignore[misc, no-redef] + UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore -def encode_by_type(o: Any) -> Any: - encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) +def encode_by_type(o: typing.Any) -> typing.Any: + encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = ( + defaultdict(tuple) + ) for type_, encoder in encoders_by_type.items(): encoders_by_class_tuples[encoder] += (type_,) @@ -201,49 +241,54 @@ def encode_by_type(o: Any) -> Any: return encoder(o) -def update_forward_refs(model: Type["Model"], **localns: Any) -> None: +def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: if IS_PYDANTIC_V2: - model.model_rebuild(raise_errors=False) # type: 
ignore[attr-defined] + model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2 else: model.update_forward_refs(**localns) # Mirrors Pydantic's internal typing -AnyCallable = Callable[..., Any] +AnyCallable = typing.Callable[..., typing.Any] def universal_root_validator( pre: bool = False, -) -> Callable[[AnyCallable], AnyCallable]: +) -> typing.Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return cast(AnyCallable, pydantic.model_validator(mode="before" if pre else "after")(func)) # type: ignore[attr-defined] - return cast(AnyCallable, pydantic.root_validator(pre=pre)(func)) # type: ignore[call-overload] + return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 return decorator -def universal_field_validator(field_name: str, pre: bool = False) -> Callable[[AnyCallable], AnyCallable]: +def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return cast(AnyCallable, pydantic.field_validator(field_name, mode="before" if pre else "after")(func)) # type: ignore[attr-defined] - return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func)) + return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1 return decorator -PydanticField = Union[ModelField, pydantic.fields.FieldInfo] +PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo] -def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]: +def _get_model_fields( + model: typing.Type["Model"], +) -> typing.Mapping[str, PydanticField]: if IS_PYDANTIC_V2: - return cast(Mapping[str, PydanticField], 
model.model_fields) # type: ignore[attr-defined] - return cast(Mapping[str, PydanticField], model.__fields__) + return model.model_fields # type: ignore # Pydantic v2 + else: + return model.__fields__ # type: ignore # Pydantic v1 -def _get_field_default(field: PydanticField) -> Any: +def _get_field_default(field: PydanticField) -> typing.Any: try: - value = field.get_default() # type: ignore[union-attr] + value = field.get_default() # type: ignore # Pydantic < v1.10.15 except: value = field.default if IS_PYDANTIC_V2: diff --git a/src/label_studio_sdk/core/serialization.py b/src/label_studio_sdk/core/serialization.py index c36e865cc..cb5dcbf93 100644 --- a/src/label_studio_sdk/core/serialization.py +++ b/src/label_studio_sdk/core/serialization.py @@ -4,9 +4,10 @@ import inspect import typing -import pydantic import typing_extensions +import pydantic + class FieldMetadata: """ @@ -160,12 +161,7 @@ def _convert_mapping( direction: typing.Literal["read", "write"], ) -> typing.Mapping[str, object]: converted_object: typing.Dict[str, object] = {} - try: - annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) - except NameError: - # The TypedDict contains a circular reference, so - # we use the __annotations__ attribute directly. - annotations = getattr(expected_type, "__annotations__", {}) + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) aliases_to_field_names = _get_alias_to_field_name(annotations) for key, value in object_.items(): if direction == "read" and key in aliases_to_field_names: diff --git a/src/label_studio_sdk/errors/__init__.py b/src/label_studio_sdk/errors/__init__.py index 27c365534..076c429b4 100644 --- a/src/label_studio_sdk/errors/__init__.py +++ b/src/label_studio_sdk/errors/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .bad_request_error import BadRequestError from .internal_server_error import InternalServerError from .not_found_error import NotFoundError diff --git a/src/label_studio_sdk/errors/bad_request_error.py b/src/label_studio_sdk/errors/bad_request_error.py index baf5be4f7..9c13c61f9 100644 --- a/src/label_studio_sdk/errors/bad_request_error.py +++ b/src/label_studio_sdk/errors/bad_request_error.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import typing - from ..core.api_error import ApiError +import typing class BadRequestError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): - super().__init__(status_code=400, headers=headers, body=body) + def __init__(self, body: typing.Optional[typing.Any]): + super().__init__(status_code=400, body=body) diff --git a/src/label_studio_sdk/errors/internal_server_error.py b/src/label_studio_sdk/errors/internal_server_error.py index 2c9be920b..b4d235549 100644 --- a/src/label_studio_sdk/errors/internal_server_error.py +++ b/src/label_studio_sdk/errors/internal_server_error.py @@ -1,10 +1,8 @@ # This file was auto-generated by Fern from our API Definition. -import typing - from ..core.api_error import ApiError class InternalServerError(ApiError): - def __init__(self, body: str, headers: typing.Optional[typing.Dict[str, str]] = None): - super().__init__(status_code=500, headers=headers, body=body) + def __init__(self, body: str): + super().__init__(status_code=500, body=body) diff --git a/src/label_studio_sdk/errors/not_found_error.py b/src/label_studio_sdk/errors/not_found_error.py index dcd60e383..a1235b87f 100644 --- a/src/label_studio_sdk/errors/not_found_error.py +++ b/src/label_studio_sdk/errors/not_found_error.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing - from ..core.api_error import ApiError +import typing class NotFoundError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): - super().__init__(status_code=404, headers=headers, body=body) + def __init__(self, body: typing.Optional[typing.Any]): + super().__init__(status_code=404, body=body) diff --git a/src/label_studio_sdk/errors/unauthorized_error.py b/src/label_studio_sdk/errors/unauthorized_error.py index c83b25c26..1c00f98ab 100644 --- a/src/label_studio_sdk/errors/unauthorized_error.py +++ b/src/label_studio_sdk/errors/unauthorized_error.py @@ -1,10 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import typing - from ..core.api_error import ApiError +import typing class UnauthorizedError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): - super().__init__(status_code=401, headers=headers, body=body) + def __init__(self, body: typing.Optional[typing.Any]): + super().__init__(status_code=401, body=body) diff --git a/src/label_studio_sdk/export_storage/__init__.py b/src/label_studio_sdk/export_storage/__init__.py index 635b53022..0203a293b 100644 --- a/src/label_studio_sdk/export_storage/__init__.py +++ b/src/label_studio_sdk/export_storage/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import ExportStorageListTypesResponseItem from . import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse diff --git a/src/label_studio_sdk/export_storage/azure/__init__.py b/src/label_studio_sdk/export_storage/azure/__init__.py index 97dcea344..323fc5f3d 100644 --- a/src/label_studio_sdk/export_storage/azure/__init__.py +++ b/src/label_studio_sdk/export_storage/azure/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import AzureCreateResponse, AzureUpdateResponse __all__ = ["AzureCreateResponse", "AzureUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/azure/client.py b/src/label_studio_sdk/export_storage/azure/client.py index c7314088a..4ef84db7f 100644 --- a/src/label_studio_sdk/export_storage/azure/client.py +++ b/src/label_studio_sdk/export_storage/azure/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_export_storage import AzureBlobExportStorage -from .raw_client import AsyncRawAzureClient, RawAzureClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.azure_create_response import AzureCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class AzureClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawAzureClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawAzureClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawAzureClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[AzureBlobExportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.export_storage.azure.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -128,18 +142,38 @@ def create( ) client.export_storage.azure.create() """ - _response = self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= 
_response.status_code < 300: + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -204,19 +238,33 @@ def validate( ) client.export_storage.azure.validate() """ - _response = self._raw_client.validate( - id=id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -249,8 +297,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= 
_response.status_code < 300: + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -282,8 +346,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -353,19 +427,38 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return 
typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -399,27 +492,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncAzureClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawAzureClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawAzureClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawAzureClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[AzureBlobExportStorage]: """ @@ -459,8 +560,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -534,18 +654,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return 
_response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -618,19 +758,33 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -671,8 +825,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + 
if 200 <= _response.status_code < 300: + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -712,8 +882,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -791,19 +971,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= 
_response.status_code < 300: + return typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -845,5 +1044,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/azure/raw_client.py b/src/label_studio_sdk/export_storage/azure/raw_client.py deleted file mode 100644 index 12ef87da6..000000000 --- a/src/label_studio_sdk/export_storage/azure/raw_client.py +++ /dev/null @@ -1,881 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.azure_blob_export_storage import AzureBlobExportStorage -from .types.azure_create_response import AzureCreateResponse -from .types.azure_update_response import AzureUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawAzureClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[AzureBlobExportStorage]]: - """ - - You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[AzureBlobExportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[AzureBlobExportStorage], - parse_obj_as( - type_=typing.List[AzureBlobExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[AzureCreateResponse]: - """ - - Create a new target storage connection to Microsoft Azure Blob storage. - - For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
- - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[AzureCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, 
- account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[AzureBlobExportStorage]: - """ - - Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[AzureBlobExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[AzureUpdateResponse]: - """ - - Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob export storage. 
- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[AzureUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[AzureBlobExportStorage]: - """ - - Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external containers only go one way. 
They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[AzureBlobExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawAzureClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[AzureBlobExportStorage]]: - """ - - You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. 
- - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[AzureBlobExportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[AzureBlobExportStorage], - parse_obj_as( - type_=typing.List[AzureBlobExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[AzureCreateResponse]: - """ - - Create a new target storage connection to Microsoft Azure Blob storage. 
- - For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[AzureCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[AzureBlobExportStorage]: - """ - - Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[AzureBlobExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[AzureUpdateResponse]: - """ - - Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob export storage. 
- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[AzureUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[AzureBlobExportStorage]: - """ - - Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
- - Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[AzureBlobExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/azure/types/__init__.py b/src/label_studio_sdk/export_storage/azure/types/__init__.py index e56fb71c1..0cb2cdbbc 100644 --- a/src/label_studio_sdk/export_storage/azure/types/__init__.py +++ b/src/label_studio_sdk/export_storage/azure/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .azure_create_response import AzureCreateResponse from .azure_update_response import AzureUpdateResponse diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py index 1f374501b..d82c30787 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class AzureCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py index 286118f96..e530bafb9 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class AzureUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/client.py b/src/label_studio_sdk/export_storage/client.py index d0158fe01..ac11dab4a 100644 --- a/src/label_studio_sdk/export_storage/client.py +++ b/src/label_studio_sdk/export_storage/client.py @@ -1,44 +1,38 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.client_wrapper import SyncClientWrapper +from .azure.client import AzureClient +from .gcs.client import GcsClient +from .local.client import LocalClient +from .redis.client import RedisClient +from .s3.client import S3Client +from .s3s.client import S3SClient import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from .azure.client import AsyncAzureClient, AzureClient -from .gcs.client import AsyncGcsClient, GcsClient -from .local.client import AsyncLocalClient, LocalClient -from .raw_client import AsyncRawExportStorageClient, RawExportStorageClient -from .redis.client import AsyncRedisClient, RedisClient -from .s3.client import AsyncS3Client, S3Client -from .s3s.client import AsyncS3SClient, S3SClient -from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem +from .types.export_storage_list_types_response_item import ( + ExportStorageListTypesResponseItem, +) +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper +from .azure.client import AsyncAzureClient +from .gcs.client import AsyncGcsClient +from .local.client import AsyncLocalClient +from .redis.client import AsyncRedisClient +from .s3.client import AsyncS3Client +from .s3s.client import AsyncS3SClient class ExportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawExportStorageClient(client_wrapper=client_wrapper) - self.azure = AzureClient(client_wrapper=client_wrapper) - - self.gcs = GcsClient(client_wrapper=client_wrapper) - - self.local = LocalClient(client_wrapper=client_wrapper) - - self.redis = RedisClient(client_wrapper=client_wrapper) - - self.s3 = S3Client(client_wrapper=client_wrapper) - - self.s3s = S3SClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> 
RawExportStorageClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawExportStorageClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.azure = AzureClient(client_wrapper=self._client_wrapper) + self.gcs = GcsClient(client_wrapper=self._client_wrapper) + self.local = LocalClient(client_wrapper=self._client_wrapper) + self.redis = RedisClient(client_wrapper=self._client_wrapper) + self.s3 = S3Client(client_wrapper=self._client_wrapper) + self.s3s = S3SClient(client_wrapper=self._client_wrapper) def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -65,35 +59,35 @@ def list_types( ) client.export_storage.list_types() """ - _response = self._raw_client.list_types(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncExportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawExportStorageClient(client_wrapper=client_wrapper) - self.azure = AsyncAzureClient(client_wrapper=client_wrapper) - - self.gcs = AsyncGcsClient(client_wrapper=client_wrapper) - - self.local = AsyncLocalClient(client_wrapper=client_wrapper) - - self.redis = AsyncRedisClient(client_wrapper=client_wrapper) - - self.s3 = AsyncS3Client(client_wrapper=client_wrapper) - - self.s3s = AsyncS3SClient(client_wrapper=client_wrapper) - - 
@property - def with_raw_response(self) -> AsyncRawExportStorageClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawExportStorageClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) + self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) + self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) + self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) + self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) + self.s3s = AsyncS3SClient(client_wrapper=self._client_wrapper) async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -128,5 +122,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list_types(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/gcs/__init__.py b/src/label_studio_sdk/export_storage/gcs/__init__.py index 7054c2af9..2c4b3d376 100644 --- a/src/label_studio_sdk/export_storage/gcs/__init__.py +++ b/src/label_studio_sdk/export_storage/gcs/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import GcsCreateResponse, GcsUpdateResponse __all__ = ["GcsCreateResponse", "GcsUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/gcs/client.py b/src/label_studio_sdk/export_storage/gcs/client.py index e1ac120d2..d077d9d8c 100644 --- a/src/label_studio_sdk/export_storage/gcs/client.py +++ b/src/label_studio_sdk/export_storage/gcs/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_export_storage import GcsExportStorage -from .raw_client import AsyncRawGcsClient, RawGcsClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.gcs_create_response import GcsCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class GcsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawGcsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawGcsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawGcsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[GcsExportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.export_storage.gcs.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -128,18 +142,38 @@ def create( ) client.export_storage.gcs.create() """ - _response = self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + 
omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -204,19 +238,33 @@ def validate( ) client.export_storage.gcs.validate() """ - _response = self._raw_client.validate( - id=id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -249,8 +297,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + 
f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -282,8 +346,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -353,19 +427,38 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + 
headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -399,27 +492,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncGcsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawGcsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawGcsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawGcsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[GcsExportStorage]: """ @@ -459,8 +560,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -534,18 +654,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, 
request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -618,19 +758,33 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -671,8 +825,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await 
self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -712,8 +882,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -791,19 +971,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": 
google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -845,5 +1044,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/gcs/raw_client.py b/src/label_studio_sdk/export_storage/gcs/raw_client.py deleted file mode 100644 index 700e69127..000000000 --- a/src/label_studio_sdk/export_storage/gcs/raw_client.py +++ /dev/null @@ -1,881 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.gcs_export_storage import GcsExportStorage -from .types.gcs_create_response import GcsCreateResponse -from .types.gcs_update_response import GcsUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawGcsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[GcsExportStorage]]: - """ - - You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[GcsExportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[GcsExportStorage], - parse_obj_as( - type_=typing.List[GcsExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[GcsCreateResponse]: - """ - - Create a new target storage connection to Google Cloud Storage. - - For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[GcsCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - 
google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[GcsExportStorage]: - """ - - Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[GcsExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this gcs export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[GcsUpdateResponse]: - """ - - Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs export storage. - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. 
Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[GcsUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[GcsExportStorage]: - """ - - Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
- - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[GcsExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawGcsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[GcsExportStorage]]: - """ - - You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
- - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[GcsExportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[GcsExportStorage], - parse_obj_as( - type_=typing.List[GcsExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[GcsCreateResponse]: - """ - - Create a new target storage connection to Google Cloud Storage. - - For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
- - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[GcsCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = 
OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[GcsExportStorage]: - """ - - Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[GcsExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this gcs export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[GcsUpdateResponse]: - """ - - Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs export storage. - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. 
Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[GcsUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[GcsExportStorage]: - """ - - Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
- - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[GcsExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/gcs/types/__init__.py b/src/label_studio_sdk/export_storage/gcs/types/__init__.py index 73d049459..832c1ee1c 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/__init__.py +++ b/src/label_studio_sdk/export_storage/gcs/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .gcs_create_response import GcsCreateResponse from .gcs_update_response import GcsUpdateResponse diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py index f4c6c63a0..955c9d0cf 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class GcsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py index 86860ea58..48f05d0f3 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class GcsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/local/__init__.py b/src/label_studio_sdk/export_storage/local/__init__.py index 44e4524e0..248109b66 100644 --- a/src/label_studio_sdk/export_storage/local/__init__.py +++ b/src/label_studio_sdk/export_storage/local/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import LocalCreateResponse, LocalUpdateResponse __all__ = ["LocalCreateResponse", "LocalUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/local/client.py b/src/label_studio_sdk/export_storage/local/client.py index 3201ddb45..cf37f1c04 100644 --- a/src/label_studio_sdk/export_storage/local/client.py +++ b/src/label_studio_sdk/export_storage/local/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_export_storage import LocalFilesExportStorage -from .raw_client import AsyncRawLocalClient, RawLocalClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.local_create_response import LocalCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class LocalClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawLocalClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawLocalClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawLocalClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[LocalFilesExportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.export_storage.local.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -120,16 +134,36 @@ def create( ) client.export_storage.local.create() """ - _response = self._raw_client.create( - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: 
ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -186,17 +220,31 @@ def validate( ) client.export_storage.local.validate() """ - _response = self._raw_client.validate( - id=id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -229,8 +277,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -262,8 +326,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -325,17 +399,36 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -369,27 +462,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncLocalClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawLocalClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawLocalClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawLocalClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[LocalFilesExportStorage]: """ @@ -429,8 +530,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -496,16 +616,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalCreateResponse, + parse_obj_as( 
+ type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -570,17 +710,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -621,8 +775,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except 
JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -662,8 +832,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -733,17 +913,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, 
body=_response_json) async def sync( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -787,5 +986,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/local/raw_client.py b/src/label_studio_sdk/export_storage/local/raw_client.py deleted file mode 100644 index 1019d32ec..000000000 --- a/src/label_studio_sdk/export_storage/local/raw_client.py +++ /dev/null @@ -1,821 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.local_files_export_storage import LocalFilesExportStorage -from .types.local_create_response import LocalCreateResponse -from .types.local_update_response import LocalUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) 
- - -class RawLocalClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[LocalFilesExportStorage]]: - """ - - You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[LocalFilesExportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[LocalFilesExportStorage], - parse_obj_as( - type_=typing.List[LocalFilesExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - 
regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[LocalCreateResponse]: - """ - - Create a new target storage connection to a local file directory. - - For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). - - Parameters - ---------- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[LocalCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. 
If set, storage with specified ID will be updated - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[LocalFilesExportStorage]: - """ - - Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
- - Parameters - ---------- - id : int - A unique integer value identifying this local files export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[LocalFilesExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this local files export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[LocalUpdateResponse]: - """ - - Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this local files export storage. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[LocalUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[LocalFilesExportStorage]: - """ - - Sync tasks to an local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
- - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[LocalFilesExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawLocalClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[LocalFilesExportStorage]]: - """ - - You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[LocalFilesExportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[LocalFilesExportStorage], - parse_obj_as( - type_=typing.List[LocalFilesExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[LocalCreateResponse]: - """ - - Create a new target storage connection to a local file directory. - - For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
- - Parameters - ---------- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[LocalCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - 
request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[LocalFilesExportStorage]: - """ - - Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this local files export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[LocalFilesExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this local files export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[LocalUpdateResponse]: - """ - - Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this local files export storage. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[LocalUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[LocalFilesExportStorage]: - """ - - Sync tasks to an local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
- - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[LocalFilesExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/local/types/__init__.py b/src/label_studio_sdk/export_storage/local/types/__init__.py index 5f88d9245..9a12e8745 100644 --- a/src/label_studio_sdk/export_storage/local/types/__init__.py +++ b/src/label_studio_sdk/export_storage/local/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .local_create_response import LocalCreateResponse from .local_update_response import LocalUpdateResponse diff --git a/src/label_studio_sdk/export_storage/local/types/local_create_response.py b/src/label_studio_sdk/export_storage/local/types/local_create_response.py index 4f45cad89..95051747a 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class LocalCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/local/types/local_update_response.py b/src/label_studio_sdk/export_storage/local/types/local_update_response.py index 885189c74..e5dd8df6c 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class LocalUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/raw_client.py b/src/label_studio_sdk/export_storage/raw_client.py deleted file mode 100644 index d843d01dc..000000000 --- a/src/label_studio_sdk/export_storage/raw_client.py +++ /dev/null @@ -1,93 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem - - -class RawExportStorageClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list_types( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[ExportStorageListTypesResponseItem]]: - """ - Retrieve a list of the export storages types. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[ExportStorageListTypesResponseItem]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ExportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawExportStorageClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list_types( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> 
AsyncHttpResponse[typing.List[ExportStorageListTypesResponseItem]]: - """ - Retrieve a list of the export storages types. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[ExportStorageListTypesResponseItem]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ExportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/redis/__init__.py b/src/label_studio_sdk/export_storage/redis/__init__.py index e52cb2ace..7f87f18fe 100644 --- a/src/label_studio_sdk/export_storage/redis/__init__.py +++ b/src/label_studio_sdk/export_storage/redis/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import RedisCreateResponse, RedisUpdateResponse __all__ = ["RedisCreateResponse", "RedisUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/redis/client.py b/src/label_studio_sdk/export_storage/redis/client.py index 6a2ba5f52..03c8b42e5 100644 --- a/src/label_studio_sdk/export_storage/redis/client.py +++ b/src/label_studio_sdk/export_storage/redis/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_export_storage import RedisExportStorage -from .raw_client import AsyncRawRedisClient, RawRedisClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.redis_create_response import RedisCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class RedisClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawRedisClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawRedisClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawRedisClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[RedisExportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.export_storage.redis.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -132,19 +146,39 @@ def create( ) client.export_storage.redis.create() """ - _response = self._raw_client.create( - db=db, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="POST", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + 
RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -213,20 +247,34 @@ def validate( ) client.export_storage.redis.validate() """ - _response = self._raw_client.validate( - id=id, - db=db, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis/validate", + method="POST", + json={ + "id": id, + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -259,8 +307,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisExportStorage, + parse_obj_as( + 
type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -292,8 +356,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -367,20 +441,39 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - db=db, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = 
_response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -414,27 +507,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncRedisClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawRedisClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawRedisClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawRedisClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[RedisExportStorage]: """ @@ -474,8 +575,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -553,19 +673,39 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - db=db, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="POST", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + 
return typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -642,20 +782,34 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - db=db, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis/validate", + method="POST", + json={ + "id": id, + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -696,8 +850,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisExportStorage, + parse_obj_as( + 
type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -737,8 +907,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -820,20 +1000,39 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - db=db, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + 
_response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -875,5 +1074,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/redis/raw_client.py b/src/label_studio_sdk/export_storage/redis/raw_client.py deleted file mode 100644 index 707afb81e..000000000 --- a/src/label_studio_sdk/export_storage/redis/raw_client.py +++ /dev/null @@ -1,911 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.redis_export_storage import RedisExportStorage -from .types.redis_create_response import RedisCreateResponse -from .types.redis_update_response import RedisUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawRedisClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[RedisExportStorage]]: - """ - - You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[RedisExportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[RedisExportStorage], - parse_obj_as( - type_=typing.List[RedisExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - db: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[RedisCreateResponse]: - """ - - Create a new target storage connection to Redis. - - For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). - - Parameters - ---------- - db : typing.Optional[int] - Database ID of database to use - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[RedisCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="POST", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - db: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> 
HttpResponse[None]: - """ - - Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - db : typing.Optional[int] - Database ID of database to use - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis/validate", - method="POST", - json={ - "id": id, - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[RedisExportStorage]: - """ - - Get a specific Redis export 
storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[RedisExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this redis export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - db: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[RedisUpdateResponse]: - """ - - Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis export storage. - - db : typing.Optional[int] - Database ID of database to use - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[RedisUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[RedisExportStorage]: - """ - - Sync tasks to an Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[RedisExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawRedisClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[RedisExportStorage]]: - """ - - You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
- - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[RedisExportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[RedisExportStorage], - parse_obj_as( - type_=typing.List[RedisExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - db: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[RedisCreateResponse]: - """ - - Create a new target storage connection to Redis. - - For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be exported until you [sync your connection](sync). - - Parameters - ---------- - db : typing.Optional[int] - Database ID of database to use - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RedisCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="POST", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - db: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - 
project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - db : typing.Optional[int] - Database ID of database to use - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis/validate", - method="POST", - json={ - "id": id, - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[RedisExportStorage]: - """ - - Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[RedisExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this redis export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - db: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[RedisUpdateResponse]: - """ - - Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis export storage. - - db : typing.Optional[int] - Database ID of database to use - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RedisUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[RedisExportStorage]: - """ - - Sync tasks to an Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RedisExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/redis/types/__init__.py b/src/label_studio_sdk/export_storage/redis/types/__init__.py index aea7ed291..b3557bc08 100644 --- a/src/label_studio_sdk/export_storage/redis/types/__init__.py +++ b/src/label_studio_sdk/export_storage/redis/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .redis_create_response import RedisCreateResponse from .redis_update_response import RedisUpdateResponse diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py index c57c0ace4..7aab4a479 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class RedisCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py index 7787c488a..8eec3c821 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class RedisUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3/__init__.py b/src/label_studio_sdk/export_storage/s3/__init__.py index e6421caaf..c749fe227 100644 --- a/src/label_studio_sdk/export_storage/s3/__init__.py +++ b/src/label_studio_sdk/export_storage/s3/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import S3CreateResponse, S3UpdateResponse __all__ = ["S3CreateResponse", "S3UpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/s3/client.py b/src/label_studio_sdk/export_storage/s3/client.py index d32dc2b76..9b98d193a 100644 --- a/src/label_studio_sdk/export_storage/s3/client.py +++ b/src/label_studio_sdk/export_storage/s3/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3export_storage import S3ExportStorage -from .raw_client import AsyncRawS3Client, RawS3Client +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.s3create_response import S3CreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class S3Client: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawS3Client(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawS3Client: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawS3Client - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3ExportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.export_storage.s3.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -144,22 +158,42 @@ def create( ) client.export_storage.s3.create() """ - _response = self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + 
"aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -240,23 +274,37 @@ def validate( ) client.export_storage.s3.validate() """ - _response = self._raw_client.validate( - id=id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -289,8 +337,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -322,8 +386,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -409,23 +483,42 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - 
aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -459,27 +552,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncS3Client: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawS3Client(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawS3Client: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawS3Client - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3ExportStorage]: """ @@ -519,8 +620,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -610,22 +730,42 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - 
aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -714,23 +854,37 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": 
aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -771,8 +925,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -812,8 +982,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = 
_response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -907,23 +1087,42 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -965,5 +1164,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, 
request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/s3/raw_client.py b/src/label_studio_sdk/export_storage/s3/raw_client.py deleted file mode 100644 index ed6014980..000000000 --- a/src/label_studio_sdk/export_storage/s3/raw_client.py +++ /dev/null @@ -1,999 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.s3export_storage import S3ExportStorage -from .types.s3create_response import S3CreateResponse -from .types.s3update_response import S3UpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) 
- - -class RawS3Client: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[S3ExportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[S3ExportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3ExportStorage], - parse_obj_as( - type_=typing.List[S3ExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - 
prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3CreateResponse]: - """ - - Create a new target storage connection to S3 storage. - - For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[S3CreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific S3 export storage connection. 
This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[S3ExportStorage]: - """ - - Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[S3ExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this s3 export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3UpdateResponse]: - """ - - Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 export storage. - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3UpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None - ) -> HttpResponse[S3ExportStorage]: - """ - - Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3ExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawS3Client: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[S3ExportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. 
Use this API request to get a list of all S3 export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[S3ExportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3ExportStorage], - parse_obj_as( - type_=typing.List[S3ExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: 
typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3CreateResponse]: - """ - - Create a new target storage connection to S3 storage. - - For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[S3CreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific S3 export storage connection. 
This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[S3ExportStorage]: - """ - - Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[S3ExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this s3 export storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3UpdateResponse]: - """ - - Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 export storage. - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3UpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[S3ExportStorage]: - """ - - Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3ExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/s3/types/__init__.py b/src/label_studio_sdk/export_storage/s3/types/__init__.py index bb333983a..3cc20ce7b 100644 --- a/src/label_studio_sdk/export_storage/s3/types/__init__.py +++ b/src/label_studio_sdk/export_storage/s3/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .s3create_response import S3CreateResponse from .s3update_response import S3UpdateResponse diff --git a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py index ff883c191..cc74a5583 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic import typing_extensions -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3CreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py index c3ee36914..466c2b64c 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic import typing_extensions -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3UpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3s/__init__.py b/src/label_studio_sdk/export_storage/s3s/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/export_storage/s3s/__init__.py +++ b/src/label_studio_sdk/export_storage/s3s/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - diff --git a/src/label_studio_sdk/export_storage/s3s/client.py b/src/label_studio_sdk/export_storage/s3s/client.py index 6dde0d936..eba7a105f 100644 --- a/src/label_studio_sdk/export_storage/s3s/client.py +++ b/src/label_studio_sdk/export_storage/s3s/client.py @@ -1,11 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3s_export_storage import S3SExportStorage -from .raw_client import AsyncRawS3SClient, RawS3SClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from ...core.jsonable_encoder import jsonable_encoder +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -13,21 +16,13 @@ class S3SClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawS3SClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawS3SClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawS3SClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3SExportStorage]: """ @@ -59,8 +54,27 @@ def list( ) client.export_storage.s3s.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -132,20 +146,40 @@ def create( ) client.export_storage.s3s.create() """ - _response = self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + 
omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ @@ -176,8 +210,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -207,8 +257,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -284,21 +344,40 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -367,39 +446,45 @@ def validate( ) client.export_storage.s3s.validate() """ - _response = self._raw_client.validate( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + 
"external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncS3SClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawS3SClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawS3SClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawS3SClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3SExportStorage]: """ @@ -439,8 +524,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def 
create( self, @@ -520,20 +624,40 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ @@ -572,8 +696,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -611,8 +751,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -696,21 +846,40 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = 
_response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -787,17 +956,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - can_delete_objects=can_delete_objects, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/s3s/raw_client.py b/src/label_studio_sdk/export_storage/s3s/raw_client.py deleted file mode 100644 index c230c2660..000000000 --- a/src/label_studio_sdk/export_storage/s3s/raw_client.py +++ /dev/null @@ -1,827 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.s3s_export_storage import S3SExportStorage - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawS3SClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[S3SExportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[S3SExportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3SExportStorage], - parse_obj_as( - type_=typing.List[S3SExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3SExportStorage]: - """ - - Create a new target storage connection to a S3 bucket with IAM role access. - - For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3SExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[S3SExportStorage]: - """ - - Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
- - Parameters - ---------- - id : int - Export storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3SExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Parameters - ---------- - id : int - Export storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3SExportStorage]: - """ - - Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Parameters - ---------- - id : int - Export storage ID - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3SExportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: 
typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s/validate", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawS3SClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[S3SExportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[S3SExportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3SExportStorage], - parse_obj_as( - type_=typing.List[S3SExportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3SExportStorage]: - """ - - Create a new target storage connection to a S3 bucket with IAM role access. - - For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3SExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[S3SExportStorage]: - """ - - Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
- - Parameters - ---------- - id : int - Export storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3SExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Parameters - ---------- - id : int - Export storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3SExportStorage]: - """ - - Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). - - Parameters - ---------- - id : int - Export storage ID - - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3SExportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - can_delete_objects: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = 
OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. - - Parameters - ---------- - can_delete_objects : typing.Optional[bool] - Deletion from storage enabled. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s/validate", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/types/__init__.py b/src/label_studio_sdk/export_storage/types/__init__.py index 1539a9784..58de62c07 100644 --- a/src/label_studio_sdk/export_storage/types/__init__.py +++ b/src/label_studio_sdk/export_storage/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .export_storage_list_types_response_item import ExportStorageListTypesResponseItem __all__ = ["ExportStorageListTypesResponseItem"] diff --git a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py index 50f3659ad..684e9172a 100644 --- a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py +++ b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - +from ...core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ExportStorageListTypesResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/files/__init__.py b/src/label_studio_sdk/files/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/files/__init__.py +++ b/src/label_studio_sdk/files/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/files/client.py b/src/label_studio_sdk/files/client.py index 9b68c2ce6..a832258c3 100644 --- a/src/label_studio_sdk/files/client.py +++ b/src/label_studio_sdk/files/client.py @@ -1,11 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.file_upload import FileUpload -from .raw_client import AsyncRawFilesClient, RawFilesClient +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -13,18 +16,7 @@ class FilesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawFilesClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawFilesClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawFilesClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> FileUpload: """ @@ -54,8 +46,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -84,8 +92,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -132,8 +150,29 @@ def update( id_=1, ) """ - _response = self._raw_client.update(id_, id=id, file=file, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id_)}", + 
method="PATCH", + json={ + "id": id, + "file": file, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def list( self, @@ -179,8 +218,28 @@ def list( id=1, ) """ - _response = self._raw_client.list(id, all_=all_, ids=ids, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="GET", + params={ + "all": all_, + "ids": ids, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -212,8 +271,18 @@ def delete_many(self, id: int, *, request_options: typing.Optional[RequestOption id=1, ) """ - _response = self._raw_client.delete_many(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -241,24 +310,23 @@ def download(self, filename: str, *, request_options: typing.Optional[RequestOpt filename="filename", ) """ - _response = self._raw_client.download(filename, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncFilesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawFilesClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawFilesClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawFilesClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> FileUpload: """ @@ -296,8 +364,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -334,8 +418,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -390,8 +484,29 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update(id_, id=id, file=file, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + 
f"api/import/file-upload/{jsonable_encoder(id_)}", + method="PATCH", + json={ + "id": id, + "file": file, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def list( self, @@ -445,8 +560,28 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(id, all_=all_, ids=ids, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="GET", + params={ + "all": all_, + "ids": ids, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -486,8 +621,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete_many(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -523,5 +668,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.download(filename, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/files/raw_client.py b/src/label_studio_sdk/files/raw_client.py deleted file mode 100644 index 756cbf4b8..000000000 --- a/src/label_studio_sdk/files/raw_client.py +++ /dev/null @@ -1,523 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.file_upload import FileUpload - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawFilesClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[FileUpload]: - """ - Retrieve details about a specific uploaded file. 
To get the file upload ID, use [Get files list](list). - - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[FileUpload] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - Delete a specific uploaded file. To get the file upload ID, use [Get files list](list). - - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id_: int, - *, - id: typing.Optional[int] = OMIT, - file: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[FileUpload]: - """ - - Update a specific uploaded file. To get the file upload ID, use [Get files list](list). - - You will need to include the file data in the request body. For example: - ```bash - curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F ‘file=@path/to/my_file.csv’ - ``` - - Parameters - ---------- - id_ : int - A unique integer value identifying this file upload. - - id : typing.Optional[int] - - file : typing.Optional[str] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[FileUpload] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id_)}", - method="PATCH", - json={ - "id": id, - "file": file, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def list( - self, - id: int, - *, - all_: typing.Optional[bool] = None, - ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[typing.List[FileUpload]]: - """ - - Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - all_ : typing.Optional[bool] - Set to "true" if you want to retrieve all file uploads - - ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] - Specify the list of file upload IDs to retrieve, e.g. ids=[1,2,3] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[FileUpload]] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="GET", - params={ - "all": all_, - "ids": ids, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[FileUpload], - parse_obj_as( - type_=typing.List[FileUpload], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. - - Parameters - ---------- - filename : str - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawFilesClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[FileUpload]: - """ - Retrieve details about a specific uploaded file. To get the file upload ID, use [Get files list](list). 
- - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[FileUpload] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Delete a specific uploaded file. To get the file upload ID, use [Get files list](list). - - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id_: int, - *, - id: typing.Optional[int] = OMIT, - file: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[FileUpload]: - """ - - Update a specific uploaded file. To get the file upload ID, use [Get files list](list). - - You will need to include the file data in the request body. For example: - ```bash - curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F ‘file=@path/to/my_file.csv’ - ``` - - Parameters - ---------- - id_ : int - A unique integer value identifying this file upload. - - id : typing.Optional[int] - - file : typing.Optional[str] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[FileUpload] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id_)}", - method="PATCH", - json={ - "id": id, - "file": file, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def list( - self, - id: int, - *, - all_: typing.Optional[bool] = None, - ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[typing.List[FileUpload]]: - """ - - Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - all_ : typing.Optional[bool] - Set to "true" if you want to retrieve all file uploads - - ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] - Specify the list of file upload IDs to retrieve, e.g. ids=[1,2,3] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[FileUpload]] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="GET", - params={ - "all": all_, - "ids": ids, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[FileUpload], - parse_obj_as( - type_=typing.List[FileUpload], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete_many( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this file upload. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def download( - self, filename: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. - - Parameters - ---------- - filename : str - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/__init__.py b/src/label_studio_sdk/import_storage/__init__.py index b2e5e5130..51599b165 100644 --- a/src/label_studio_sdk/import_storage/__init__.py +++ b/src/label_studio_sdk/import_storage/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import ImportStorageListTypesResponseItem from . import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse diff --git a/src/label_studio_sdk/import_storage/azure/__init__.py b/src/label_studio_sdk/import_storage/azure/__init__.py index 97dcea344..323fc5f3d 100644 --- a/src/label_studio_sdk/import_storage/azure/__init__.py +++ b/src/label_studio_sdk/import_storage/azure/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import AzureCreateResponse, AzureUpdateResponse __all__ = ["AzureCreateResponse", "AzureUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/azure/client.py b/src/label_studio_sdk/import_storage/azure/client.py index 2b70c67fe..be43dd3da 100644 --- a/src/label_studio_sdk/import_storage/azure/client.py +++ b/src/label_studio_sdk/import_storage/azure/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_import_storage import AzureBlobImportStorage -from .raw_client import AsyncRawAzureClient, RawAzureClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.azure_create_response import AzureCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -15,21 +18,13 @@ class AzureClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawAzureClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawAzureClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawAzureClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[AzureBlobImportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.import_storage.azure.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -142,21 +156,41 @@ def create( ) client.import_storage.azure.create() """ - _response = self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="POST", + json={ + 
"regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -233,22 +267,36 @@ def validate( ) client.import_storage.azure.validate() """ - _response = self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -281,8 +329,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -316,8 +380,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -399,22 +473,41 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - container=container, - 
prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -449,27 +542,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncAzureClient: def __init__(self, *, client_wrapper: 
AsyncClientWrapper): - self._raw_client = AsyncRawAzureClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawAzureClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawAzureClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[AzureBlobImportStorage]: """ @@ -509,8 +610,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -598,21 +718,41 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="POST", + json={ + "regex_filter": regex_filter, + 
"use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -697,22 +837,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - account_name=account_name, - account_key=account_key, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -753,8 +907,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -796,8 +966,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -887,22 +1067,41 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - container=container, - prefix=prefix, - 
account_name=account_name, - account_key=account_key, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -945,5 +1144,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/import_storage/azure/raw_client.py 
b/src/label_studio_sdk/import_storage/azure/raw_client.py deleted file mode 100644 index 6ebcfaff5..000000000 --- a/src/label_studio_sdk/import_storage/azure/raw_client.py +++ /dev/null @@ -1,981 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.azure_blob_import_storage import AzureBlobImportStorage -from .types.azure_create_response import AzureCreateResponse -from .types.azure_update_response import AzureUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawAzureClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[AzureBlobImportStorage]]: - """ - - You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[AzureBlobImportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[AzureBlobImportStorage], - parse_obj_as( - type_=typing.List[AzureBlobImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[AzureCreateResponse]: - """ - - Create a new source storage connection to Microsoft Azure Blob storage. - - For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. - - After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[AzureCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. 
If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[AzureBlobImportStorage]: - """ - - Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[AzureBlobImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[AzureUpdateResponse]: - """ - - Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob import storage. - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[AzureUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[AzureBlobImportStorage]: - """ - - Sync tasks 
from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[AzureBlobImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawAzureClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[AzureBlobImportStorage]]: - """ - - You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. 
Use this API request to get a list of all Azure import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[AzureBlobImportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[AzureBlobImportStorage], - parse_obj_as( - type_=typing.List[AzureBlobImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> 
AsyncHttpResponse[AzureCreateResponse]: - """ - - Create a new source storage connection to Microsoft Azure Blob storage. - - For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[AzureCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
- - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[AzureBlobImportStorage]: - """ - - Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[AzureBlobImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - container: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - account_name: typing.Optional[str] = OMIT, - account_key: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[AzureUpdateResponse]: - """ - - Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this azure blob import storage. - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - container : typing.Optional[str] - Azure blob container - - prefix : typing.Optional[str] - Azure blob prefix name - - account_name : typing.Optional[str] - Azure Blob account name - - account_key : typing.Optional[str] - Azure Blob account key - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[AzureUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> 
AsyncHttpResponse[AzureBlobImportStorage]: - """ - - Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[AzureBlobImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/azure/types/__init__.py b/src/label_studio_sdk/import_storage/azure/types/__init__.py index e56fb71c1..0cb2cdbbc 100644 --- a/src/label_studio_sdk/import_storage/azure/types/__init__.py +++ b/src/label_studio_sdk/import_storage/azure/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .azure_create_response import AzureCreateResponse from .azure_update_response import AzureUpdateResponse diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py index b6cd9028b..b59cf0117 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class AzureCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py index 207aa6d05..afacbeb28 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class AzureUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/client.py b/src/label_studio_sdk/import_storage/client.py index 2fada220a..c4a97e1ad 100644 --- a/src/label_studio_sdk/import_storage/client.py +++ b/src/label_studio_sdk/import_storage/client.py @@ -1,44 +1,38 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.client_wrapper import SyncClientWrapper +from .azure.client import AzureClient +from .gcs.client import GcsClient +from .local.client import LocalClient +from .redis.client import RedisClient +from .s3.client import S3Client +from .s3s.client import S3SClient import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from .azure.client import AsyncAzureClient, AzureClient -from .gcs.client import AsyncGcsClient, GcsClient -from .local.client import AsyncLocalClient, LocalClient -from .raw_client import AsyncRawImportStorageClient, RawImportStorageClient -from .redis.client import AsyncRedisClient, RedisClient -from .s3.client import AsyncS3Client, S3Client -from .s3s.client import AsyncS3SClient, S3SClient -from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem +from .types.import_storage_list_types_response_item import ( + ImportStorageListTypesResponseItem, +) +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper +from .azure.client import AsyncAzureClient +from .gcs.client import AsyncGcsClient +from .local.client import AsyncLocalClient +from .redis.client import AsyncRedisClient +from .s3.client import AsyncS3Client +from .s3s.client import AsyncS3SClient class ImportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawImportStorageClient(client_wrapper=client_wrapper) - self.azure = AzureClient(client_wrapper=client_wrapper) - - self.gcs = GcsClient(client_wrapper=client_wrapper) - - self.local = LocalClient(client_wrapper=client_wrapper) - - self.redis = RedisClient(client_wrapper=client_wrapper) - - self.s3 = S3Client(client_wrapper=client_wrapper) - - self.s3s = S3SClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> 
RawImportStorageClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawImportStorageClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.azure = AzureClient(client_wrapper=self._client_wrapper) + self.gcs = GcsClient(client_wrapper=self._client_wrapper) + self.local = LocalClient(client_wrapper=self._client_wrapper) + self.redis = RedisClient(client_wrapper=self._client_wrapper) + self.s3 = S3Client(client_wrapper=self._client_wrapper) + self.s3s = S3SClient(client_wrapper=self._client_wrapper) def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -65,35 +59,35 @@ def list_types( ) client.import_storage.list_types() """ - _response = self._raw_client.list_types(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncImportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawImportStorageClient(client_wrapper=client_wrapper) - self.azure = AsyncAzureClient(client_wrapper=client_wrapper) - - self.gcs = AsyncGcsClient(client_wrapper=client_wrapper) - - self.local = AsyncLocalClient(client_wrapper=client_wrapper) - - self.redis = AsyncRedisClient(client_wrapper=client_wrapper) - - self.s3 = AsyncS3Client(client_wrapper=client_wrapper) - - self.s3s = AsyncS3SClient(client_wrapper=client_wrapper) - - 
@property - def with_raw_response(self) -> AsyncRawImportStorageClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawImportStorageClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) + self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) + self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) + self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) + self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) + self.s3s = AsyncS3SClient(client_wrapper=self._client_wrapper) async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -128,5 +122,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list_types(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/import_storage/gcs/__init__.py b/src/label_studio_sdk/import_storage/gcs/__init__.py index 7054c2af9..2c4b3d376 100644 --- a/src/label_studio_sdk/import_storage/gcs/__init__.py +++ b/src/label_studio_sdk/import_storage/gcs/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import GcsCreateResponse, GcsUpdateResponse __all__ = ["GcsCreateResponse", "GcsUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/gcs/client.py b/src/label_studio_sdk/import_storage/gcs/client.py index aeca74bdc..9007140ab 100644 --- a/src/label_studio_sdk/import_storage/gcs/client.py +++ b/src/label_studio_sdk/import_storage/gcs/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_import_storage import GcsImportStorage -from .raw_client import AsyncRawGcsClient, RawGcsClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.gcs_create_response import GcsCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class GcsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawGcsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawGcsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawGcsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[GcsImportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.import_storage.gcs.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -142,21 +156,41 @@ def create( ) client.import_storage.gcs.create() """ - _response = self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + 
"google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -233,22 +267,36 @@ def validate( ) client.import_storage.gcs.validate() """ - _response = self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -281,8 
+329,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -316,8 +380,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -399,22 +473,41 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = self._client_wrapper.httpx_client.request( + 
f"api/storages/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -449,27 +542,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncGcsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawGcsClient(client_wrapper=client_wrapper) - - @property - def 
with_raw_response(self) -> AsyncRawGcsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawGcsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[GcsImportStorage]: """ @@ -509,8 +610,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -598,21 +718,41 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + 
"description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -697,22 +837,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -753,8 +907,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -796,8 +966,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -887,22 +1067,41 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - 
google_application_credentials=google_application_credentials, - google_project_id=google_project_id, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -945,5 +1144,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git 
a/src/label_studio_sdk/import_storage/gcs/raw_client.py b/src/label_studio_sdk/import_storage/gcs/raw_client.py deleted file mode 100644 index 2e153f0f0..000000000 --- a/src/label_studio_sdk/import_storage/gcs/raw_client.py +++ /dev/null @@ -1,981 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.gcs_import_storage import GcsImportStorage -from .types.gcs_create_response import GcsCreateResponse -from .types.gcs_update_response import GcsUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawGcsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[GcsImportStorage]]: - """ - - You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[GcsImportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[GcsImportStorage], - parse_obj_as( - type_=typing.List[GcsImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[GcsCreateResponse]: - """ - - Create a new source storage connection to a Google Cloud Storage bucket. - - For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
- - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[GcsCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
- - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[GcsImportStorage]: - """ - - Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[GcsImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this gcs import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[GcsUpdateResponse]: - """ - - Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs import storage. - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[GcsUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def 
sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[GcsImportStorage]: - """ - - Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[GcsImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawGcsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[GcsImportStorage]]: - """ - - You can connect your Google Cloud Storage 
bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[GcsImportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[GcsImportStorage], - parse_obj_as( - type_=typing.List[GcsImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: 
typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[GcsCreateResponse]: - """ - - Create a new source storage connection to a Google Cloud Storage bucket. - - For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[GcsCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
- - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[GcsImportStorage]: - """ - - Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[GcsImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this gcs import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - google_application_credentials: typing.Optional[str] = OMIT, - google_project_id: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[GcsUpdateResponse]: - """ - - Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this gcs import storage. - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for direct download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - GCS bucket name - - prefix : typing.Optional[str] - GCS bucket prefix - - google_application_credentials : typing.Optional[str] - The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. - - google_project_id : typing.Optional[str] - Google project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[GcsUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), 
body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[GcsImportStorage]: - """ - - Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[GcsImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/gcs/types/__init__.py b/src/label_studio_sdk/import_storage/gcs/types/__init__.py index 73d049459..832c1ee1c 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/__init__.py +++ b/src/label_studio_sdk/import_storage/gcs/types/__init__.py @@ -1,7 +1,5 @@ # 
This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .gcs_create_response import GcsCreateResponse from .gcs_update_response import GcsUpdateResponse diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py index 7950f54b0..58c05a731 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class GcsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py index 96e92949b..54c7e415c 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class GcsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/local/__init__.py b/src/label_studio_sdk/import_storage/local/__init__.py index 44e4524e0..248109b66 100644 --- a/src/label_studio_sdk/import_storage/local/__init__.py +++ b/src/label_studio_sdk/import_storage/local/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import LocalCreateResponse, LocalUpdateResponse __all__ = ["LocalCreateResponse", "LocalUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/local/client.py b/src/label_studio_sdk/import_storage/local/client.py index 08bd1b9ba..fb51c56d2 100644 --- a/src/label_studio_sdk/import_storage/local/client.py +++ b/src/label_studio_sdk/import_storage/local/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_import_storage import LocalFilesImportStorage -from .raw_client import AsyncRawLocalClient, RawLocalClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.local_create_response import LocalCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class LocalClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawLocalClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawLocalClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawLocalClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[LocalFilesImportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.import_storage.local.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -120,16 +134,36 @@ def create( ) client.import_storage.local.create() """ - _response = self._raw_client.create( - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + 
object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -186,17 +220,31 @@ def validate( ) client.import_storage.local.validate() """ - _response = self._raw_client.validate( - id=id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -229,8 +277,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -264,8 +328,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -327,17 +401,36 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -372,27 +465,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncLocalClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawLocalClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawLocalClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawLocalClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[LocalFilesImportStorage]: """ @@ -432,8 +533,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -499,16 +619,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalCreateResponse, + parse_obj_as( + 
type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -573,17 +713,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -624,8 +778,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -667,8 +837,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -738,17 +918,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - title=title, - description=description, - project=project, - path=path, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync( self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None @@ -793,5 +992,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/import_storage/local/raw_client.py b/src/label_studio_sdk/import_storage/local/raw_client.py deleted file mode 100644 index e631df782..000000000 --- a/src/label_studio_sdk/import_storage/local/raw_client.py +++ /dev/null @@ -1,827 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.local_files_import_storage import LocalFilesImportStorage -from .types.local_create_response import LocalCreateResponse -from .types.local_update_response import LocalUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) 
- - -class RawLocalClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[LocalFilesImportStorage]]: - """ - - If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[LocalFilesImportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[LocalFilesImportStorage], - parse_obj_as( - type_=typing.List[LocalFilesImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[LocalCreateResponse]: - """ - - Create a new source storage connection to a local file directory. - - For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
- - Parameters - ---------- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[LocalCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: 
typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[LocalFilesImportStorage]: - """ - - Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this local files import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[LocalFilesImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this local files import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[LocalUpdateResponse]: - """ - - Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this local files import storage. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[LocalUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[LocalFilesImportStorage]: - """ - - Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[LocalFilesImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawLocalClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[LocalFilesImportStorage]]: - """ - - If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[LocalFilesImportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[LocalFilesImportStorage], - parse_obj_as( - type_=typing.List[LocalFilesImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[LocalCreateResponse]: - """ - - Create a new source storage connection to a local file directory. - - For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
- - Parameters - ---------- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[LocalCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - 
request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[LocalFilesImportStorage]: - """ - - Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this local files import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[LocalFilesImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this local files import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[LocalUpdateResponse]: - """ - - Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this local files import storage. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Path to local directory - - regex_filter : typing.Optional[str] - Regex for filtering objects - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[LocalUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[LocalFilesImportStorage]: - """ - - Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
- - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[LocalFilesImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/local/types/__init__.py b/src/label_studio_sdk/import_storage/local/types/__init__.py index 5f88d9245..9a12e8745 100644 --- a/src/label_studio_sdk/import_storage/local/types/__init__.py +++ b/src/label_studio_sdk/import_storage/local/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .local_create_response import LocalCreateResponse from .local_update_response import LocalUpdateResponse diff --git a/src/label_studio_sdk/import_storage/local/types/local_create_response.py b/src/label_studio_sdk/import_storage/local/types/local_create_response.py index 4f45cad89..95051747a 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class LocalCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/local/types/local_update_response.py b/src/label_studio_sdk/import_storage/local/types/local_update_response.py index 885189c74..e5dd8df6c 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class LocalUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/raw_client.py b/src/label_studio_sdk/import_storage/raw_client.py deleted file mode 100644 index 71f3ffbe2..000000000 --- a/src/label_studio_sdk/import_storage/raw_client.py +++ /dev/null @@ -1,93 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem - - -class RawImportStorageClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list_types( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[ImportStorageListTypesResponseItem]]: - """ - Retrieve a list of the import storages types. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[ImportStorageListTypesResponseItem]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ImportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawImportStorageClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list_types( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> 
AsyncHttpResponse[typing.List[ImportStorageListTypesResponseItem]]: - """ - Retrieve a list of the import storages types. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[ImportStorageListTypesResponseItem]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ImportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/redis/__init__.py b/src/label_studio_sdk/import_storage/redis/__init__.py index e52cb2ace..7f87f18fe 100644 --- a/src/label_studio_sdk/import_storage/redis/__init__.py +++ b/src/label_studio_sdk/import_storage/redis/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import RedisCreateResponse, RedisUpdateResponse __all__ = ["RedisCreateResponse", "RedisUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/redis/client.py b/src/label_studio_sdk/import_storage/redis/client.py index fa8ac2684..543b2740c 100644 --- a/src/label_studio_sdk/import_storage/redis/client.py +++ b/src/label_studio_sdk/import_storage/redis/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_import_storage import RedisImportStorage -from .raw_client import AsyncRawRedisClient, RawRedisClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.redis_create_response import RedisCreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class RedisClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawRedisClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawRedisClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawRedisClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[RedisImportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.import_storage.redis.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -132,19 +146,39 @@ def create( ) client.import_storage.redis.create() """ - _response = self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + 
RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -213,20 +247,34 @@ def validate( ) client.import_storage.redis.validate() """ - _response = self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -259,8 +307,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisImportStorage, + parse_obj_as( + 
type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -294,8 +358,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -369,20 +443,39 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = 
_response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -417,27 +510,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncRedisClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawRedisClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawRedisClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawRedisClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[RedisImportStorage]: """ @@ -477,8 +578,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -556,19 +676,39 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 
300: + return typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -645,20 +785,34 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -699,8 +853,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisImportStorage, + parse_obj_as( + 
type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -742,8 +912,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -825,20 +1005,39 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - title=title, - description=description, - project=project, - path=path, - host=host, - port=port, - password=password, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + 
_response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -881,5 +1080,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/import_storage/redis/raw_client.py b/src/label_studio_sdk/import_storage/redis/raw_client.py deleted file mode 100644 index 2411795ad..000000000 --- a/src/label_studio_sdk/import_storage/redis/raw_client.py +++ /dev/null @@ -1,917 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.redis_import_storage import RedisImportStorage -from .types.redis_create_response import RedisCreateResponse -from .types.redis_update_response import RedisUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawRedisClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[RedisImportStorage]]: - """ - - You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[RedisImportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[RedisImportStorage], - parse_obj_as( - type_=typing.List[RedisImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[RedisCreateResponse]: - """ - - Create a new source storage connection to a Redis database. - - For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[RedisCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] 
= OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[RedisImportStorage]: - """ - - Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[RedisImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this redis import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[RedisUpdateResponse]: - """ - - Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis import storage. - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
- - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[RedisUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[RedisImportStorage]: - """ - - Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[RedisImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawRedisClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[RedisImportStorage]]: - """ - - You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
- - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[RedisImportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[RedisImportStorage], - parse_obj_as( - type_=typing.List[RedisImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[RedisCreateResponse]: - """ - - Create a new source storage connection to a Redis database. - - For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. - - After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[RedisCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. 
You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[RedisImportStorage]: - """ - - Get a specific Redis import storage connection. You will need to provide the import storage ID. 
You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RedisImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. 
- - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this redis import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - path: typing.Optional[str] = OMIT, - host: typing.Optional[str] = OMIT, - port: typing.Optional[str] = OMIT, - password: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[RedisUpdateResponse]: - """ - - Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this redis import storage. 
- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - path : typing.Optional[str] - Storage prefix (optional) - - host : typing.Optional[str] - Server Host IP (optional) - - port : typing.Optional[str] - Server Port (optional) - - password : typing.Optional[str] - Server Password (optional) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RedisUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: 
int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[RedisImportStorage]: - """ - - Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. - - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RedisImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/redis/types/__init__.py b/src/label_studio_sdk/import_storage/redis/types/__init__.py index aea7ed291..b3557bc08 100644 --- a/src/label_studio_sdk/import_storage/redis/types/__init__.py +++ b/src/label_studio_sdk/import_storage/redis/types/__init__.py @@ -1,7 +1,5 @@ # This file was 
auto-generated by Fern from our API Definition. -# isort: skip_file - from .redis_create_response import RedisCreateResponse from .redis_update_response import RedisUpdateResponse diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py index c2dda07b6..fa8fba715 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class RedisCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py index 6d9310ee1..247ff9057 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class RedisUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3/__init__.py b/src/label_studio_sdk/import_storage/s3/__init__.py index e6421caaf..c749fe227 100644 --- a/src/label_studio_sdk/import_storage/s3/__init__.py +++ b/src/label_studio_sdk/import_storage/s3/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import S3CreateResponse, S3UpdateResponse __all__ = ["S3CreateResponse", "S3UpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/s3/client.py b/src/label_studio_sdk/import_storage/s3/client.py index 6219d680c..3884545d8 100644 --- a/src/label_studio_sdk/import_storage/s3/client.py +++ b/src/label_studio_sdk/import_storage/s3/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3import_storage import S3ImportStorage -from .raw_client import AsyncRawS3Client, RawS3Client +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from .types.s3create_response import S3CreateResponse +from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +18,13 @@ class S3Client: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawS3Client(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawS3Client: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawS3Client - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3ImportStorage]: """ @@ -61,8 +56,27 @@ def list( ) client.import_storage.s3.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -162,26 +176,46 @@ def create( ) client.import_storage.s3.create() """ - _response = self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, 
+ "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -278,27 +312,41 @@ def validate( ) client.import_storage.s3.validate() """ - _response = self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": 
aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -331,8 +379,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -366,8 +430,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -469,27 +543,46 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -524,27 +617,35 @@ def sync(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncS3Client: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawS3Client(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawS3Client: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawS3Client - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3ImportStorage]: """ @@ -584,8 +685,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except 
JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -693,26 +813,46 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -817,27 +957,41 @@ async def main() -> None: asyncio.run(main()) """ - 
_response = await self._raw_client.validate( - id=id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -878,8 +1032,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 
200 <= _response.status_code < 300: + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -921,8 +1091,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -1032,27 +1212,46 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - aws_session_token=aws_session_token, - aws_sse_kms_key_id=aws_sse_kms_key_id, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, 
+ "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -1095,5 +1294,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/import_storage/s3/raw_client.py b/src/label_studio_sdk/import_storage/s3/raw_client.py deleted file mode 100644 index 9604ce40d..000000000 --- a/src/label_studio_sdk/import_storage/s3/raw_client.py +++ /dev/null @@ -1,1129 +0,0 @@ -# This file was auto-generated 
by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.s3import_storage import S3ImportStorage -from .types.s3create_response import S3CreateResponse -from .types.s3update_response import S3UpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawS3Client: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[S3ImportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[S3ImportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3ImportStorage], - parse_obj_as( - type_=typing.List[S3ImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3CreateResponse]: - """ - - Create a new source storage connection to a S3 bucket. - - For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. 
- - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[S3CreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - 
region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[S3ImportStorage]: - """ - - Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[S3ImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this s3 import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3UpdateResponse]: - """ - - Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 import storage. - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. 
- - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[S3UpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[S3ImportStorage]: - """ - - Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
- - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3ImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawS3Client: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[S3ImportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
- - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[S3ImportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3ImportStorage], - parse_obj_as( - type_=typing.List[S3ImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3CreateResponse]: - """ - - Create a new source storage connection to a S3 bucket. 
- - For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. - - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[S3CreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - id: typing.Optional[int] = OMIT, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: 
typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - id : typing.Optional[int] - Storage ID. If set, storage with specified ID will be updated - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[S3ImportStorage]: - """ - - Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[S3ImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - A unique integer value identifying this s3 import storage. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - aws_access_key_id: typing.Optional[str] = OMIT, - aws_secret_access_key: typing.Optional[str] = OMIT, - aws_session_token: typing.Optional[str] = OMIT, - aws_sse_kms_key_id: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3UpdateResponse]: - """ - - Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - A unique integer value identifying this s3 import storage. - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. 
You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - aws_access_key_id : typing.Optional[str] - AWS_ACCESS_KEY_ID - - aws_secret_access_key : typing.Optional[str] - AWS_SECRET_ACCESS_KEY - - aws_session_token : typing.Optional[str] - AWS_SESSION_TOKEN - - aws_sse_kms_key_id : typing.Optional[str] - AWS SSE KMS Key ID - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[S3UpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[S3ImportStorage]: - """ - - Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
- - Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3ImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/s3/types/__init__.py b/src/label_studio_sdk/import_storage/s3/types/__init__.py index bb333983a..3cc20ce7b 100644 --- a/src/label_studio_sdk/import_storage/s3/types/__init__.py +++ b/src/label_studio_sdk/import_storage/s3/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .s3create_response import S3CreateResponse from .s3update_response import S3UpdateResponse diff --git a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py index ac8bed298..86b6e5fd3 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
+from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic import typing_extensions -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3CreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py index 95d6fe001..3ed56596d 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic import typing_extensions -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3UpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3s/__init__.py b/src/label_studio_sdk/import_storage/s3s/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/import_storage/s3s/__init__.py +++ b/src/label_studio_sdk/import_storage/s3s/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/import_storage/s3s/client.py b/src/label_studio_sdk/import_storage/s3s/client.py index 4f39746fe..a5163a1a5 100644 --- a/src/label_studio_sdk/import_storage/s3s/client.py +++ b/src/label_studio_sdk/import_storage/s3s/client.py @@ -1,11 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3s_import_storage import S3SImportStorage -from .raw_client import AsyncRawS3SClient, RawS3SClient +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from ...core.jsonable_encoder import jsonable_encoder +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -13,21 +16,13 @@ class S3SClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawS3SClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawS3SClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawS3SClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3SImportStorage]: """ @@ -59,8 +54,27 @@ def list( ) client.import_storage.s3s.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -152,24 +166,44 @@ def create( ) client.import_storage.s3s.create() """ - _response = self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -200,8 +234,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + 
f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -235,8 +285,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -330,25 +390,44 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": 
description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate( self, @@ -433,24 +512,38 @@ def validate( ) client.import_storage.s3s.validate() """ - _response = self._raw_client.validate( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -481,27 +574,35 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncS3SClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawS3SClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawS3SClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawS3SClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[S3SImportStorage]: """ @@ -541,8 +642,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -642,24 +762,44 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, 
+ "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -698,8 +838,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -741,8 +897,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + 
return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -844,25 +1010,44 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate( self, @@ -955,24 +1140,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate( - regex_filter=regex_filter, - use_blob_urls=use_blob_urls, - presign=presign, - presign_ttl=presign_ttl, - 
recursive_scan=recursive_scan, - title=title, - description=description, - project=project, - bucket=bucket, - prefix=prefix, - external_id=external_id, - role_arn=role_arn, - region_name=region_name, - s3endpoint=s3endpoint, + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -1011,5 +1210,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.sync(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git 
a/src/label_studio_sdk/import_storage/s3s/raw_client.py b/src/label_studio_sdk/import_storage/s3s/raw_client.py deleted file mode 100644 index 52f3367ae..000000000 --- a/src/label_studio_sdk/import_storage/s3s/raw_client.py +++ /dev/null @@ -1,1047 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.s3s_import_storage import S3SImportStorage - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawS3SClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[S3SImportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[S3SImportStorage]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3SImportStorage], - parse_obj_as( - type_=typing.List[S3SImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3SImportStorage]: - """ - - Create a new source storage connection to a S3 bucket. - - For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. - - After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[S3SImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[S3SImportStorage]: - """ - - Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Parameters - ---------- - id : int - Import storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[S3SImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - Import storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[S3SImportStorage]: - """ - - Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - Import storage ID - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3SImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
- - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s/validate", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[S3SImportStorage]: - """ - - Sync tasks from an S3 import storage connection. 
You will need to provide the import storage ID. You can find this using [List import storages](list). - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[S3SImportStorage] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawS3SClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[S3SImportStorage]]: - """ - - You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[S3SImportStorage]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[S3SImportStorage], - parse_obj_as( - type_=typing.List[S3SImportStorage], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3SImportStorage]: - """ - - Create a new source storage connection to a S3 bucket. - - For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. - - Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. 
- - After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[S3SImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[S3SImportStorage]: - """ - - Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Parameters - ---------- - id : int - Import storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[S3SImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. - - If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. - - Parameters - ---------- - id : int - Import storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[S3SImportStorage]: - """ - - Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). - - For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). - - Parameters - ---------- - id : int - Import storage ID - - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. - - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3SImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate( - self, - *, - regex_filter: typing.Optional[str] = OMIT, - use_blob_urls: typing.Optional[bool] = OMIT, - presign: typing.Optional[bool] = OMIT, - presign_ttl: typing.Optional[int] = OMIT, - recursive_scan: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - bucket: typing.Optional[str] = OMIT, - prefix: typing.Optional[str] = OMIT, - external_id: typing.Optional[str] = OMIT, - role_arn: typing.Optional[str] = OMIT, - region_name: typing.Optional[str] = OMIT, - s3endpoint: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. - - Parameters - ---------- - regex_filter : typing.Optional[str] - Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - - use_blob_urls : typing.Optional[bool] - Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
- - presign : typing.Optional[bool] - Presign URLs for download - - presign_ttl : typing.Optional[int] - Presign TTL in minutes - - recursive_scan : typing.Optional[bool] - Scan recursively - - title : typing.Optional[str] - Storage title - - description : typing.Optional[str] - Storage description - - project : typing.Optional[int] - Project ID - - bucket : typing.Optional[str] - S3 bucket name - - prefix : typing.Optional[str] - S3 bucket prefix - - external_id : typing.Optional[str] - AWS External ID - - role_arn : typing.Optional[str] - AWS Role ARN - - region_name : typing.Optional[str] - AWS Region - - s3endpoint : typing.Optional[str] - S3 Endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s/validate", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def sync( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[S3SImportStorage]: - """ - - Sync tasks from an S3 import storage connection. 
You will need to provide the import storage ID. You can find this using [List import storages](list). - - Parameters - ---------- - id : int - Storage ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[S3SImportStorage] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/types/__init__.py b/src/label_studio_sdk/import_storage/types/__init__.py index f82663649..f995a3c6d 100644 --- a/src/label_studio_sdk/import_storage/types/__init__.py +++ b/src/label_studio_sdk/import_storage/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .import_storage_list_types_response_item import ImportStorageListTypesResponseItem __all__ = ["ImportStorageListTypesResponseItem"] diff --git a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py index 3247ea665..21112358a 100644 --- a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py +++ b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - +from ...core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ImportStorageListTypesResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/jwt_settings/__init__.py b/src/label_studio_sdk/jwt_settings/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/jwt_settings/__init__.py +++ b/src/label_studio_sdk/jwt_settings/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/jwt_settings/client.py b/src/label_studio_sdk/jwt_settings/client.py index 653bddc46..338513a6c 100644 --- a/src/label_studio_sdk/jwt_settings/client.py +++ b/src/label_studio_sdk/jwt_settings/client.py @@ -1,11 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.jwt_settings_response import JwtSettingsResponse -from .raw_client import AsyncRawJwtSettingsClient, RawJwtSettingsClient +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -13,18 +15,7 @@ class JwtSettingsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawJwtSettingsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawJwtSettingsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawJwtSettingsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> JwtSettingsResponse: """ @@ -49,8 +40,24 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Jwt ) client.jwt_settings.get() """ - _response = self._raw_client.get(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -95,29 +102,35 @@ def create( api_token_ttl_days=1, ) """ - _response = self._raw_client.create( - api_tokens_enabled=api_tokens_enabled, - legacy_api_tokens_enabled=legacy_api_tokens_enabled, - api_token_ttl_days=api_token_ttl_days, + _response = self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="POST", + json={ + "api_tokens_enabled": api_tokens_enabled, + "legacy_api_tokens_enabled": legacy_api_tokens_enabled, + "api_token_ttl_days": api_token_ttl_days, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncJwtSettingsClient: def 
__init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawJwtSettingsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawJwtSettingsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawJwtSettingsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> JwtSettingsResponse: """ @@ -150,8 +163,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -204,10 +233,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - api_tokens_enabled=api_tokens_enabled, - legacy_api_tokens_enabled=legacy_api_tokens_enabled, - api_token_ttl_days=api_token_ttl_days, + _response = await self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="POST", + json={ + "api_tokens_enabled": api_tokens_enabled, + "legacy_api_tokens_enabled": legacy_api_tokens_enabled, + "api_token_ttl_days": api_token_ttl_days, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + 
object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/jwt_settings/raw_client.py b/src/label_studio_sdk/jwt_settings/raw_client.py deleted file mode 100644 index 33cf55a1b..000000000 --- a/src/label_studio_sdk/jwt_settings/raw_client.py +++ /dev/null @@ -1,212 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.jwt_settings_response import JwtSettingsResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawJwtSettingsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[JwtSettingsResponse]: - """ - Retrieve JWT settings for the currently-active organization. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[JwtSettingsResponse] - JWT settings retrieved successfully - """ - _response = self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - api_tokens_enabled: bool, - legacy_api_tokens_enabled: bool, - api_token_ttl_days: int, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[JwtSettingsResponse]: - """ - Update JWT settings for the currently active organization. - - Parameters - ---------- - api_tokens_enabled : bool - Whether JWT API tokens are enabled - - legacy_api_tokens_enabled : bool - Whether legacy API tokens are enabled - - api_token_ttl_days : int - Number of days before API tokens expire - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[JwtSettingsResponse] - JWT settings updated successfully - """ - _response = self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="POST", - json={ - "api_tokens_enabled": api_tokens_enabled, - "legacy_api_tokens_enabled": legacy_api_tokens_enabled, - "api_token_ttl_days": api_token_ttl_days, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawJwtSettingsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def get( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[JwtSettingsResponse]: - """ - Retrieve JWT settings for the currently-active organization. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[JwtSettingsResponse] - JWT settings retrieved successfully - """ - _response = await self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - api_tokens_enabled: bool, - legacy_api_tokens_enabled: bool, - api_token_ttl_days: int, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[JwtSettingsResponse]: - """ - Update JWT settings for the currently active organization. - - Parameters - ---------- - api_tokens_enabled : bool - Whether JWT API tokens are enabled - - legacy_api_tokens_enabled : bool - Whether legacy API tokens are enabled - - api_token_ttl_days : int - Number of days before API tokens expire - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[JwtSettingsResponse] - JWT settings updated successfully - """ - _response = await self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="POST", - json={ - "api_tokens_enabled": api_tokens_enabled, - "legacy_api_tokens_enabled": legacy_api_tokens_enabled, - "api_token_ttl_days": api_token_ttl_days, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/ml/__init__.py b/src/label_studio_sdk/ml/__init__.py index 613f98967..e0f97600c 100644 --- a/src/label_studio_sdk/ml/__init__.py +++ b/src/label_studio_sdk/ml/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import ( MlCreateRequestAuthMethod, MlCreateResponse, diff --git a/src/label_studio_sdk/ml/client.py b/src/label_studio_sdk/ml/client.py index 2284a86d3..36be4dea6 100644 --- a/src/label_studio_sdk/ml/client.py +++ b/src/label_studio_sdk/ml/client.py @@ -1,15 +1,19 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.ml_backend import MlBackend -from .raw_client import AsyncRawMlClient, RawMlClient +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.ml_create_request_auth_method import MlCreateRequestAuthMethod from .types.ml_create_response import MlCreateResponse +from ..core.jsonable_encoder import jsonable_encoder from .types.ml_update_request_auth_method import MlUpdateRequestAuthMethod from .types.ml_update_response import MlUpdateResponse +from ..errors.internal_server_error import InternalServerError +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -17,21 +21,13 @@ class MlClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawMlClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawMlClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawMlClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[MlBackend]: """ @@ -62,8 +58,27 @@ def list( ) client.ml.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -137,20 +152,40 @@ def create( ) client.ml.create() """ - _response = self._raw_client.create( - url=url, - project=project, - is_interactive=is_interactive, - title=title, - description=description, - auth_method=auth_method, - basic_auth_user=basic_auth_user, - basic_auth_pass=basic_auth_pass, - extra_params=extra_params, - timeout=timeout, + _response = self._client_wrapper.httpx_client.request( + "api/ml/", + method="POST", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= 
_response.status_code < 300: + return typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ @@ -183,8 +218,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -216,8 +267,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -295,21 +356,40 @@ def update( id=1, ) 
""" - _response = self._raw_client.update( - id, - url=url, - project=project, - is_interactive=is_interactive, - title=title, - description=description, - auth_method=auth_method, - basic_auth_user=basic_auth_user, - basic_auth_pass=basic_auth_pass, - extra_params=extra_params, - timeout=timeout, + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="PATCH", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def predict_interactive( self, @@ -359,10 +439,26 @@ def predict_interactive( task=1, ) """ - _response = self._raw_client.predict_interactive( - id, task=task, context=context, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/interactive-annotating", + method="POST", + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, 
body=_response_json) def train( self, @@ -405,8 +501,35 @@ def train( id=1, ) """ - _response = self._raw_client.train(id, use_ground_truth=use_ground_truth, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/train", + method="POST", + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + if _response.status_code == 500: + raise InternalServerError( + typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -435,27 +558,29 @@ def list_model_versions(self, id: str, *, request_options: typing.Optional[Reque id="id", ) """ - _response = self._raw_client.list_model_versions(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncMlClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawMlClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawMlClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawMlClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[MlBackend]: """ @@ -494,8 +619,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -577,20 +721,40 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - url=url, - project=project, - is_interactive=is_interactive, - title=title, - description=description, - auth_method=auth_method, - basic_auth_user=basic_auth_user, - basic_auth_pass=basic_auth_pass, - extra_params=extra_params, - timeout=timeout, + _response = await self._client_wrapper.httpx_client.request( + "api/ml/", + method="POST", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, 
request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ @@ -631,8 +795,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -672,8 +852,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( 
self, @@ -759,21 +949,40 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - url=url, - project=project, - is_interactive=is_interactive, - title=title, - description=description, - auth_method=auth_method, - basic_auth_user=basic_auth_user, - basic_auth_pass=basic_auth_pass, - extra_params=extra_params, - timeout=timeout, + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="PATCH", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def predict_interactive( self, @@ -831,10 +1040,26 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.predict_interactive( - id, task=task, context=context, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/interactive-annotating", + method="POST", + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def train( self, @@ -885,8 +1110,35 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.train(id, use_ground_truth=use_ground_truth, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/train", + method="POST", + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + if _response.status_code == 500: + raise InternalServerError( + typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -923,5 +1175,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list_model_versions(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/ml/raw_client.py b/src/label_studio_sdk/ml/raw_client.py deleted file mode 100644 index 30ebeee47..000000000 --- 
a/src/label_studio_sdk/ml/raw_client.py +++ /dev/null @@ -1,968 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..errors.internal_server_error import InternalServerError -from ..types.ml_backend import MlBackend -from .types.ml_create_request_auth_method import MlCreateRequestAuthMethod -from .types.ml_create_response import MlCreateResponse -from .types.ml_update_request_auth_method import MlUpdateRequestAuthMethod -from .types.ml_update_response import MlUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawMlClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[MlBackend]]: - """ - - List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). - - - You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[MlBackend]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/ml/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[MlBackend], - parse_obj_as( - type_=typing.List[MlBackend], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - url: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - is_interactive: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, - basic_auth_user: typing.Optional[str] = OMIT, - basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - timeout: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[MlCreateResponse]: - """ - - Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). - - If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). - - If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. 
See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). - - Parameters - ---------- - url : typing.Optional[str] - ML backend URL - - project : typing.Optional[int] - Project ID - - is_interactive : typing.Optional[bool] - Is interactive - - title : typing.Optional[str] - Title - - description : typing.Optional[str] - Description - - auth_method : typing.Optional[MlCreateRequestAuthMethod] - Auth method - - basic_auth_user : typing.Optional[str] - Basic auth user - - basic_auth_pass : typing.Optional[str] - Basic auth password - - extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Extra parameters - - timeout : typing.Optional[int] - Response model timeout - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[MlCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/ml/", - method="POST", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MlCreateResponse, - parse_obj_as( - type_=MlCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] 
= None) -> HttpResponse[MlBackend]: - """ - - Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - For more information, see [Machine learning integration](https://labelstud.io/guide/ml). - - Parameters - ---------- - id : int - A unique integer value identifying this ml backend. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[MlBackend] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MlBackend, - parse_obj_as( - type_=MlBackend, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - For more information, see [Machine learning integration](https://labelstud.io/guide/ml). - - Parameters - ---------- - id : int - A unique integer value identifying this ml backend. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - url: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - is_interactive: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, - basic_auth_user: typing.Optional[str] = OMIT, - basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - timeout: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[MlUpdateResponse]: - """ - - Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - For more information, see [Machine learning integration](https://labelstud.io/guide/ml). - - Parameters - ---------- - id : int - A unique integer value identifying this ml backend. 
- - url : typing.Optional[str] - ML backend URL - - project : typing.Optional[int] - Project ID - - is_interactive : typing.Optional[bool] - Is interactive - - title : typing.Optional[str] - Title - - description : typing.Optional[str] - Description - - auth_method : typing.Optional[MlUpdateRequestAuthMethod] - Auth method - - basic_auth_user : typing.Optional[str] - Basic auth user - - basic_auth_pass : typing.Optional[str] - Basic auth password - - extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Extra parameters - - timeout : typing.Optional[int] - Response model timeout - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[MlUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="PATCH", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MlUpdateResponse, - parse_obj_as( - type_=MlUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def predict_interactive( - self, - id: int, - *, - task: int, - context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) 
-> HttpResponse[None]: - """ - - Enable interactive pre-annotations for a specific task. - - ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). - - Before you can use interactive annotations, it must be enabled for you ML backend connection (`"is_interactive": true`). - - You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). - - Parameters - ---------- - id : int - A unique integer value identifying this ML backend. - - task : int - ID of task to annotate - - context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Context for ML model - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/interactive-annotating", - method="POST", - json={ - "task": task, - "context": context, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def train( - self, - id: int, - *, - use_ground_truth: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. - - For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). - - You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - Parameters - ---------- - id : int - A unique integer value identifying this ML backend. - - use_ground_truth : typing.Optional[bool] - Whether to include ground truth annotations in training - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/train", - method="POST", - json={ - "use_ground_truth": use_ground_truth, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - if _response.status_code == 500: - raise InternalServerError( - headers=dict(_response.headers), - body=typing.cast( - str, - parse_obj_as( - type_=str, # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def list_model_versions( - self, id: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[None]: - """ - - Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - Parameters - ---------- - id : str - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawMlClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[MlBackend]]: - """ - - List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). - - - You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[MlBackend]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/ml/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[MlBackend], - parse_obj_as( - type_=typing.List[MlBackend], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - url: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - is_interactive: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, - basic_auth_user: typing.Optional[str] = OMIT, - basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - timeout: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[MlCreateResponse]: - """ - - Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). - - If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). 
- - If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). - - Parameters - ---------- - url : typing.Optional[str] - ML backend URL - - project : typing.Optional[int] - Project ID - - is_interactive : typing.Optional[bool] - Is interactive - - title : typing.Optional[str] - Title - - description : typing.Optional[str] - Description - - auth_method : typing.Optional[MlCreateRequestAuthMethod] - Auth method - - basic_auth_user : typing.Optional[str] - Basic auth user - - basic_auth_pass : typing.Optional[str] - Basic auth password - - extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Extra parameters - - timeout : typing.Optional[int] - Response model timeout - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[MlCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/ml/", - method="POST", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MlCreateResponse, - parse_obj_as( - type_=MlCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[MlBackend]: - """ - - Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - For more information, see [Machine learning integration](https://labelstud.io/guide/ml). - - Parameters - ---------- - id : int - A unique integer value identifying this ml backend. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[MlBackend] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MlBackend, - parse_obj_as( - type_=MlBackend, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - For more information, see [Machine learning integration](https://labelstud.io/guide/ml). - - Parameters - ---------- - id : int - A unique integer value identifying this ml backend. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - url: typing.Optional[str] = OMIT, - project: typing.Optional[int] = OMIT, - is_interactive: typing.Optional[bool] = OMIT, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, - basic_auth_user: typing.Optional[str] = OMIT, - basic_auth_pass: typing.Optional[str] = OMIT, - extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - timeout: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[MlUpdateResponse]: - """ - - Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - For more information, see [Machine learning integration](https://labelstud.io/guide/ml). - - Parameters - ---------- - id : int - A unique integer value identifying this ml backend. 
- - url : typing.Optional[str] - ML backend URL - - project : typing.Optional[int] - Project ID - - is_interactive : typing.Optional[bool] - Is interactive - - title : typing.Optional[str] - Title - - description : typing.Optional[str] - Description - - auth_method : typing.Optional[MlUpdateRequestAuthMethod] - Auth method - - basic_auth_user : typing.Optional[str] - Basic auth user - - basic_auth_pass : typing.Optional[str] - Basic auth password - - extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Extra parameters - - timeout : typing.Optional[int] - Response model timeout - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[MlUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="PATCH", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MlUpdateResponse, - parse_obj_as( - type_=MlUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def predict_interactive( - self, - id: int, - *, - task: int, - context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - request_options: 
typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Enable interactive pre-annotations for a specific task. - - ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). - - Before you can use interactive annotations, it must be enabled for you ML backend connection (`"is_interactive": true`). - - You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). - - Parameters - ---------- - id : int - A unique integer value identifying this ML backend. - - task : int - ID of task to annotate - - context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Context for ML model - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/interactive-annotating", - method="POST", - json={ - "task": task, - "context": context, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def train( - self, - id: int, - *, - use_ground_truth: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. - - For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). - - You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - Parameters - ---------- - id : int - A unique integer value identifying this ML backend. - - use_ground_truth : typing.Optional[bool] - Whether to include ground truth annotations in training - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/train", - method="POST", - json={ - "use_ground_truth": use_ground_truth, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - if _response.status_code == 500: - raise InternalServerError( - headers=dict(_response.headers), - body=typing.cast( - str, - parse_obj_as( - type_=str, # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def list_model_versions( - self, id: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). - - Parameters - ---------- - id : str - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/ml/types/__init__.py b/src/label_studio_sdk/ml/types/__init__.py index 06b8b419d..b308dc9d2 100644 --- a/src/label_studio_sdk/ml/types/__init__.py +++ b/src/label_studio_sdk/ml/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .ml_create_request_auth_method import MlCreateRequestAuthMethod from .ml_create_response import MlCreateResponse from .ml_create_response_auth_method import MlCreateResponseAuthMethod diff --git a/src/label_studio_sdk/ml/types/ml_create_response.py b/src/label_studio_sdk/ml/types/ml_create_response.py index abb00160f..030fa3d3c 100644 --- a/src/label_studio_sdk/ml/types/ml_create_response.py +++ b/src/label_studio_sdk/ml/types/ml_create_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_create_response_auth_method import MlCreateResponseAuthMethod +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class MlCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/ml/types/ml_update_response.py b/src/label_studio_sdk/ml/types/ml_update_response.py index 5220c5bce..f23e5dadd 100644 --- a/src/label_studio_sdk/ml/types/ml_update_response.py +++ b/src/label_studio_sdk/ml/types/ml_update_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. +from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_update_response_auth_method import MlUpdateResponseAuthMethod +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class MlUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/model_providers/__init__.py b/src/label_studio_sdk/model_providers/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/model_providers/__init__.py +++ b/src/label_studio_sdk/model_providers/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/model_providers/client.py b/src/label_studio_sdk/model_providers/client.py index c285b4e77..8707af4fd 100644 --- a/src/label_studio_sdk/model_providers/client.py +++ b/src/label_studio_sdk/model_providers/client.py @@ -1,17 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.model_provider_connection import ModelProviderConnection -from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod -from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy -from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from ..types.model_provider_connection_provider import ModelProviderConnectionProvider from ..types.model_provider_connection_scope import ModelProviderConnectionScope -from .raw_client import AsyncRawModelProvidersClient, RawModelProvidersClient +from ..types.model_provider_connection_organization import ( + ModelProviderConnectionOrganization, +) +from ..types.model_provider_connection_created_by import ( + ModelProviderConnectionCreatedBy, +) +import datetime as dt +from ..types.model_provider_connection_budget_reset_period import ( + ModelProviderConnectionBudgetResetPeriod, +) +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.jsonable_encoder import jsonable_encoder +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -19,18 +29,7 @@ class ModelProvidersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawModelProvidersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawModelProvidersClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawModelProvidersClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ModelProviderConnection]: """ @@ -55,8 +54,24 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.model_providers.list() """ - _response = self._raw_client.list(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -138,25 +153,50 @@ def create( provider="OpenAI", ) """ - _response = self._raw_client.create( - provider=provider, - api_key=api_key, - deployment_name=deployment_name, - endpoint=endpoint, - scope=scope, - organization=organization, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - is_internal=is_internal, - budget_limit=budget_limit, - budget_last_reset_date=budget_last_reset_date, - budget_reset_period=budget_reset_period, - budget_total_spent=budget_total_spent, - budget_alert_threshold=budget_alert_threshold, + _response = self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=ModelProviderConnectionOrganization, + 
direction="write", + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=ModelProviderConnectionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> ModelProviderConnection: """ @@ -186,8 +226,24 @@ def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = Non pk=1, ) """ - _response = self._raw_client.get(pk, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -216,8 +272,18 @@ def 
delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = pk=1, ) """ - _response = self._raw_client.delete(pk, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -304,42 +370,55 @@ def update( provider="OpenAI", ) """ - _response = self._raw_client.update( - pk, - provider=provider, - api_key=api_key, - deployment_name=deployment_name, - endpoint=endpoint, - scope=scope, - organization=organization, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - is_internal=is_internal, - budget_limit=budget_limit, - budget_last_reset_date=budget_last_reset_date, - budget_reset_period=budget_reset_period, - budget_total_spent=budget_total_spent, - budget_alert_threshold=budget_alert_threshold, + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="PATCH", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=ModelProviderConnectionOrganization, + direction="write", + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=ModelProviderConnectionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, 
+ "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncModelProvidersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawModelProvidersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawModelProvidersClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawModelProvidersClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( self, *, request_options: typing.Optional[RequestOptions] = None @@ -374,8 +453,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -465,25 +560,50 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._raw_client.create( - provider=provider, - api_key=api_key, - deployment_name=deployment_name, - endpoint=endpoint, - scope=scope, - organization=organization, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - is_internal=is_internal, - budget_limit=budget_limit, - budget_last_reset_date=budget_last_reset_date, - budget_reset_period=budget_reset_period, - budget_total_spent=budget_total_spent, - budget_alert_threshold=budget_alert_threshold, + _response = await self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=ModelProviderConnectionOrganization, + direction="write", + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=ModelProviderConnectionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> ModelProviderConnection: """ @@ -521,8 +641,24 @@ async def main() -> 
None: asyncio.run(main()) """ - _response = await self._raw_client.get(pk, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -559,8 +695,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(pk, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -655,23 +801,47 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - pk, - provider=provider, - api_key=api_key, - deployment_name=deployment_name, - endpoint=endpoint, - scope=scope, - organization=organization, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - is_internal=is_internal, - budget_limit=budget_limit, - budget_last_reset_date=budget_last_reset_date, - budget_reset_period=budget_reset_period, - budget_total_spent=budget_total_spent, - 
budget_alert_threshold=budget_alert_threshold, + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="PATCH", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=ModelProviderConnectionOrganization, + direction="write", + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=ModelProviderConnectionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/model_providers/raw_client.py b/src/label_studio_sdk/model_providers/raw_client.py deleted file mode 100644 index e481717d1..000000000 --- a/src/label_studio_sdk/model_providers/raw_client.py +++ /dev/null @@ -1,706 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..core.serialization import convert_and_respect_annotation_metadata -from ..types.model_provider_connection import ModelProviderConnection -from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod -from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy -from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization -from ..types.model_provider_connection_provider import ModelProviderConnectionProvider -from ..types.model_provider_connection_scope import ModelProviderConnectionScope - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawModelProvidersClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[ModelProviderConnection]]: - """ - Get all model provider connections created by the user in the current organization. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[ModelProviderConnection]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ModelProviderConnection], - parse_obj_as( - type_=typing.List[ModelProviderConnection], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - provider: ModelProviderConnectionProvider, - api_key: typing.Optional[str] = OMIT, - deployment_name: typing.Optional[str] = OMIT, - endpoint: typing.Optional[str] = OMIT, - scope: typing.Optional[ModelProviderConnectionScope] = OMIT, - organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, - created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - is_internal: typing.Optional[bool] = OMIT, - budget_limit: typing.Optional[float] = OMIT, - budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, - budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, - budget_total_spent: typing.Optional[float] = OMIT, - budget_alert_threshold: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ModelProviderConnection]: - """ - Create a new model provider connection. 
- - Parameters - ---------- - provider : ModelProviderConnectionProvider - - api_key : typing.Optional[str] - - deployment_name : typing.Optional[str] - - endpoint : typing.Optional[str] - - scope : typing.Optional[ModelProviderConnectionScope] - - organization : typing.Optional[ModelProviderConnectionOrganization] - - created_by : typing.Optional[ModelProviderConnectionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - is_internal : typing.Optional[bool] - Whether the model provider connection is internal, not visible to the user. - - budget_limit : typing.Optional[float] - Budget limit for the model provider connection (null if unlimited) - - budget_last_reset_date : typing.Optional[dt.datetime] - Date and time the budget was last reset - - budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] - Budget reset period for the model provider connection (null if not reset) - - budget_total_spent : typing.Optional[float] - Tracked total budget spent for the given provider connection within the current budget period - - budget_alert_threshold : typing.Optional[float] - Budget alert threshold for the given provider connection - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ModelProviderConnection] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="POST", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, pk: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[ModelProviderConnection]: - """ - Get a model provider connection by ID. - - Parameters - ---------- - pk : int - Model Provider Connection ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ModelProviderConnection] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - Delete a model provider connection by ID. - - Parameters - ---------- - pk : int - Model Provider Connection ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - pk: int, - *, - provider: ModelProviderConnectionProvider, - api_key: typing.Optional[str] = OMIT, - deployment_name: typing.Optional[str] = OMIT, - endpoint: typing.Optional[str] = OMIT, - scope: typing.Optional[ModelProviderConnectionScope] = OMIT, - organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, - created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - is_internal: typing.Optional[bool] = OMIT, - budget_limit: typing.Optional[float] = OMIT, - budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, - budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, - budget_total_spent: typing.Optional[float] = OMIT, - budget_alert_threshold: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ModelProviderConnection]: - """ - Update a model provider connection by ID. 
- - Parameters - ---------- - pk : int - Model Provider Connection ID - - provider : ModelProviderConnectionProvider - - api_key : typing.Optional[str] - - deployment_name : typing.Optional[str] - - endpoint : typing.Optional[str] - - scope : typing.Optional[ModelProviderConnectionScope] - - organization : typing.Optional[ModelProviderConnectionOrganization] - - created_by : typing.Optional[ModelProviderConnectionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - is_internal : typing.Optional[bool] - Whether the model provider connection is internal, not visible to the user. - - budget_limit : typing.Optional[float] - Budget limit for the model provider connection (null if unlimited) - - budget_last_reset_date : typing.Optional[dt.datetime] - Date and time the budget was last reset - - budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] - Budget reset period for the model provider connection (null if not reset) - - budget_total_spent : typing.Optional[float] - Tracked total budget spent for the given provider connection within the current budget period - - budget_alert_threshold : typing.Optional[float] - Budget alert threshold for the given provider connection - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ModelProviderConnection] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="PATCH", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawModelProvidersClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[ModelProviderConnection]]: - """ - Get all model provider connections created by the user in the current organization. 
- - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[ModelProviderConnection]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ModelProviderConnection], - parse_obj_as( - type_=typing.List[ModelProviderConnection], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - provider: ModelProviderConnectionProvider, - api_key: typing.Optional[str] = OMIT, - deployment_name: typing.Optional[str] = OMIT, - endpoint: typing.Optional[str] = OMIT, - scope: typing.Optional[ModelProviderConnectionScope] = OMIT, - organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, - created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - is_internal: typing.Optional[bool] = OMIT, - budget_limit: typing.Optional[float] = OMIT, - budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, - budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, - budget_total_spent: typing.Optional[float] = OMIT, - budget_alert_threshold: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ModelProviderConnection]: - """ - Create a new model provider connection. 
- - Parameters - ---------- - provider : ModelProviderConnectionProvider - - api_key : typing.Optional[str] - - deployment_name : typing.Optional[str] - - endpoint : typing.Optional[str] - - scope : typing.Optional[ModelProviderConnectionScope] - - organization : typing.Optional[ModelProviderConnectionOrganization] - - created_by : typing.Optional[ModelProviderConnectionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - is_internal : typing.Optional[bool] - Whether the model provider connection is internal, not visible to the user. - - budget_limit : typing.Optional[float] - Budget limit for the model provider connection (null if unlimited) - - budget_last_reset_date : typing.Optional[dt.datetime] - Date and time the budget was last reset - - budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] - Budget reset period for the model provider connection (null if not reset) - - budget_total_spent : typing.Optional[float] - Tracked total budget spent for the given provider connection within the current budget period - - budget_alert_threshold : typing.Optional[float] - Budget alert threshold for the given provider connection - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ModelProviderConnection] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="POST", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, pk: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[ModelProviderConnection]: - """ - Get a model provider connection by ID. - - Parameters - ---------- - pk : int - Model Provider Connection ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ModelProviderConnection] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, pk: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Delete a model provider connection by ID. - - Parameters - ---------- - pk : int - Model Provider Connection ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - pk: int, - *, - provider: ModelProviderConnectionProvider, - api_key: typing.Optional[str] = OMIT, - deployment_name: typing.Optional[str] = OMIT, - endpoint: typing.Optional[str] = OMIT, - scope: typing.Optional[ModelProviderConnectionScope] = OMIT, - organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, - created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - is_internal: typing.Optional[bool] = OMIT, - budget_limit: typing.Optional[float] = OMIT, - budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, - budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, - budget_total_spent: typing.Optional[float] = OMIT, - budget_alert_threshold: typing.Optional[float] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ModelProviderConnection]: - """ - Update a model provider connection by ID. 
- - Parameters - ---------- - pk : int - Model Provider Connection ID - - provider : ModelProviderConnectionProvider - - api_key : typing.Optional[str] - - deployment_name : typing.Optional[str] - - endpoint : typing.Optional[str] - - scope : typing.Optional[ModelProviderConnectionScope] - - organization : typing.Optional[ModelProviderConnectionOrganization] - - created_by : typing.Optional[ModelProviderConnectionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - is_internal : typing.Optional[bool] - Whether the model provider connection is internal, not visible to the user. - - budget_limit : typing.Optional[float] - Budget limit for the model provider connection (null if unlimited) - - budget_last_reset_date : typing.Optional[dt.datetime] - Date and time the budget was last reset - - budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] - Budget reset period for the model provider connection (null if not reset) - - budget_total_spent : typing.Optional[float] - Tracked total budget spent for the given provider connection within the current budget period - - budget_alert_threshold : typing.Optional[float] - Budget alert threshold for the given provider connection - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ModelProviderConnection] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="PATCH", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/predictions/__init__.py b/src/label_studio_sdk/predictions/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/predictions/__init__.py +++ b/src/label_studio_sdk/predictions/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - diff --git a/src/label_studio_sdk/predictions/client.py b/src/label_studio_sdk/predictions/client.py index f8f94ee1d..f1f84734d 100644 --- a/src/label_studio_sdk/predictions/client.py +++ b/src/label_studio_sdk/predictions/client.py @@ -1,11 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.prediction import Prediction -from .raw_client import AsyncRawPredictionsClient, RawPredictionsClient +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.jsonable_encoder import jsonable_encoder +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -13,18 +16,7 @@ class PredictionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawPredictionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawPredictionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawPredictionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( self, @@ -68,8 +60,28 @@ def list( ) client.predictions.list() """ - _response = self._raw_client.list(task=task, project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -145,10 +157,34 @@ def create( model_version="yolo-v8", ) """ - _response = self._raw_client.create( - task=task, result=result, score=score, model_version=model_version, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + "api/predictions/", + method="POST", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ @@ -181,8 +217,24 @@ def get(self, id: int, *, 
request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -212,8 +264,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -287,26 +349,39 @@ def update( model_version="yolo-v8", ) """ - _response = self._raw_client.update( - id, task=task, result=result, score=score, model_version=model_version, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - 
return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncPredictionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawPredictionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawPredictionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawPredictionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( self, @@ -358,8 +433,28 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(task=task, project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -443,10 +538,34 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - task=task, result=result, score=score, model_version=model_version, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + "api/predictions/", + 
method="POST", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ @@ -487,8 +606,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -526,8 +661,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + 
except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -609,7 +754,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, task=task, result=result, score=score, model_version=model_version, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/predictions/raw_client.py b/src/label_studio_sdk/predictions/raw_client.py deleted file mode 100644 index 7fc9c7107..000000000 --- a/src/label_studio_sdk/predictions/raw_client.py +++ /dev/null @@ -1,573 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.prediction import Prediction - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawPredictionsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, - *, - task: typing.Optional[int] = None, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[typing.List[Prediction]]: - """ - - Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). - - The terms "predictions" and pre-annotations" are used interchangeably. - - Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). - - To import predictions via the API, see [Create prediction](create). - - Parameters - ---------- - task : typing.Optional[int] - Filter predictions by task ID - - project : typing.Optional[int] - Filter predictions by project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[Prediction]] - Predictions list - """ - _response = self._client_wrapper.httpx_client.request( - "api/predictions/", - method="GET", - params={ - "task": task, - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Prediction], - parse_obj_as( - type_=typing.List[Prediction], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - score: typing.Optional[float] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Prediction]: - """ - - If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. - - To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. - - #### JSON format for predictions - Label Studio JSON format for pre-annotations must contain two sections: - * A data object which references the source of the data that the pre-annotations apply to. 
This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. - * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. - - For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) - - Parameters - ---------- - task : typing.Optional[int] - Task ID for which the prediction is created - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) - - score : typing.Optional[float] - Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. - - model_version : typing.Optional[str] - Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Prediction] - Created prediction - """ - _response = self._client_wrapper.httpx_client.request( - "api/predictions/", - method="POST", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Prediction]: - """ - - Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). - - For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). - - Parameters - ---------- - id : int - Prediction ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Prediction] - Prediction details - """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a prediction. To find the prediction ID, use [List predictions](list). - - Parameters - ---------- - id : int - Prediction ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - score: typing.Optional[float] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Prediction]: - """ - - Update a prediction. To find the prediction ID, use [List predictions](list). - - For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). - - Parameters - ---------- - id : int - Prediction ID - - task : typing.Optional[int] - Task ID for which the prediction is created - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) - - score : typing.Optional[float] - Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. 
- - model_version : typing.Optional[str] - Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Prediction] - Updated prediction - """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawPredictionsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, - *, - task: typing.Optional[int] = None, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[typing.List[Prediction]]: - """ - - Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). - - The terms "predictions" and pre-annotations" are used interchangeably. 
- - Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). - - To import predictions via the API, see [Create prediction](create). - - Parameters - ---------- - task : typing.Optional[int] - Filter predictions by task ID - - project : typing.Optional[int] - Filter predictions by project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[Prediction]] - Predictions list - """ - _response = await self._client_wrapper.httpx_client.request( - "api/predictions/", - method="GET", - params={ - "task": task, - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Prediction], - parse_obj_as( - type_=typing.List[Prediction], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - score: typing.Optional[float] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Prediction]: - """ - - If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. 
- - To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. - - #### JSON format for predictions - Label Studio JSON format for pre-annotations must contain two sections: - * A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. - * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. - - For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) - - Parameters - ---------- - task : typing.Optional[int] - Task ID for which the prediction is created - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) - - score : typing.Optional[float] - Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. - - model_version : typing.Optional[str] - Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Prediction] - Created prediction - """ - _response = await self._client_wrapper.httpx_client.request( - "api/predictions/", - method="POST", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Prediction]: - """ - - Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). - - For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). - - Parameters - ---------- - id : int - Prediction ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Prediction] - Prediction details - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a prediction. To find the prediction ID, use [List predictions](list). - - Parameters - ---------- - id : int - Prediction ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - task: typing.Optional[int] = OMIT, - result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, - score: typing.Optional[float] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Prediction]: - """ - - Update a prediction. To find the prediction ID, use [List predictions](list). - - For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). - - Parameters - ---------- - id : int - Prediction ID - - task : typing.Optional[int] - Task ID for which the prediction is created - - result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] - Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) - - score : typing.Optional[float] - Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. 
- - model_version : typing.Optional[str] - Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[Prediction] - Updated prediction - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/__init__.py b/src/label_studio_sdk/projects/__init__.py index 1cdd91f96..024a38ecc 100644 --- a/src/label_studio_sdk/projects/__init__.py +++ b/src/label_studio_sdk/projects/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import ProjectsCreateResponse, ProjectsImportTasksResponse, ProjectsListResponse, ProjectsUpdateResponse from . 
import exports, pauses from .exports import ExportsConvertResponse, ExportsListFormatsResponseItem diff --git a/src/label_studio_sdk/projects/client.py b/src/label_studio_sdk/projects/client.py index 5a1bf9e7d..de39e41c6 100644 --- a/src/label_studio_sdk/projects/client.py +++ b/src/label_studio_sdk/projects/client.py @@ -1,18 +1,26 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.pagination import AsyncPager, SyncPager +from ..core.client_wrapper import SyncClientWrapper +from .pauses.client import PausesClient +from .exports.client import ExportsClient from ..core.request_options import RequestOptions +from ..core.pagination import SyncPager from ..types.project import Project -from ..types.project_label_config import ProjectLabelConfig -from .exports.client import AsyncExportsClient, ExportsClient -from .pauses.client import AsyncPausesClient, PausesClient -from .raw_client import AsyncRawProjectsClient, RawProjectsClient +from .types.projects_list_response import ProjectsListResponse +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.projects_create_response import ProjectsCreateResponse -from .types.projects_import_tasks_response import ProjectsImportTasksResponse +from ..core.jsonable_encoder import jsonable_encoder from .types.projects_update_response import ProjectsUpdateResponse +from .types.projects_import_tasks_response import ProjectsImportTasksResponse +from ..errors.bad_request_error import BadRequestError +from ..types.project_label_config import ProjectLabelConfig +from ..core.client_wrapper import AsyncClientWrapper +from .pauses.client import AsyncPausesClient +from .exports.client import AsyncExportsClient +from ..core.pagination import AsyncPager # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, 
...) @@ -20,21 +28,9 @@ class ProjectsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawProjectsClient(client_wrapper=client_wrapper) - self.pauses = PausesClient(client_wrapper=client_wrapper) - - self.exports = ExportsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawProjectsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawProjectsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.pauses = PausesClient(client_wrapper=self._client_wrapper) + self.exports = ExportsClient(client_wrapper=self._client_wrapper) def list( self, @@ -101,15 +97,45 @@ def list( for page in response.iter_pages(): yield page """ - return self._raw_client.list( - ordering=ordering, - ids=ids, - title=title, - page=page, - page_size=page_size, - workspaces=workspaces, + page = page if page is not None else 1 + _response = self._client_wrapper.httpx_client.request( + "api/projects/", + method="GET", + params={ + "ordering": ordering, + "ids": ids, + "title": title, + "page": page, + "page_size": page_size, + "workspaces": workspaces, + }, request_options=request_options, ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + ProjectsListResponse, + parse_obj_as( + type_=ProjectsListResponse, # type: ignore + object_=_response.json(), + ), + ) + _has_next = True + _get_next = lambda: self.list( + ordering=ordering, + ids=ids, + title=title, + page=page + 1, + page_size=page_size, + workspaces=workspaces, + request_options=request_options, + ) + _items = _parsed_response.results + return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -206,25 
+232,45 @@ def create( ) client.projects.create() """ - _response = self._raw_client.create( - title=title, - description=description, - label_config=label_config, - expert_instruction=expert_instruction, - show_instruction=show_instruction, - show_skip_button=show_skip_button, - enable_empty_annotation=enable_empty_annotation, - show_annotation_history=show_annotation_history, - reveal_preannotations_interactively=reveal_preannotations_interactively, - show_collab_predictions=show_collab_predictions, - maximum_annotations=maximum_annotations, - color=color, - control_weights=control_weights, - workspace=workspace, - model_version=model_version, + _response = self._client_wrapper.httpx_client.request( + "api/projects/", + method="POST", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectsCreateResponse, + parse_obj_as( + type_=ProjectsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Project: """ @@ 
-254,8 +300,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -287,8 +349,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -395,26 +467,45 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - title=title, - description=description, - label_config=label_config, - expert_instruction=expert_instruction, - show_instruction=show_instruction, - show_skip_button=show_skip_button, - enable_empty_annotation=enable_empty_annotation, - show_annotation_history=show_annotation_history, - reveal_preannotations_interactively=reveal_preannotations_interactively, - show_collab_predictions=show_collab_predictions, - 
maximum_annotations=maximum_annotations, - color=color, - control_weights=control_weights, - workspace=workspace, - model_version=model_version, + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectsUpdateResponse, + parse_obj_as( + type_=ProjectsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def import_tasks( self, @@ -513,18 +604,48 @@ def import_tasks( request=[{"key": "value"}], ) """ - _response = self._raw_client.import_tasks( - id, - request=request, - commit_to_project=commit_to_project, - return_task_ids=return_task_ids, - preannotated_from_fields=preannotated_from_fields, + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/import", + method="POST", + params={ + "commit_to_project": commit_to_project, + "return_task_ids": return_task_ids, + "preannotated_from_fields": preannotated_from_fields, + }, + json=request, 
request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectsImportTasksResponse, + parse_obj_as( + type_=ProjectsImportTasksResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def validate_config( - self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + *, + label_config: str, + request_options: typing.Optional[RequestOptions] = None, ) -> ProjectLabelConfig: """ @@ -560,27 +681,35 @@ def validate_config( label_config="label_config", ) """ - _response = self._raw_client.validate_config(id, label_config=label_config, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/validate/", + method="POST", + json={ + "label_config": label_config, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectLabelConfig, + parse_obj_as( + type_=ProjectLabelConfig, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncProjectsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawProjectsClient(client_wrapper=client_wrapper) - self.pauses = 
AsyncPausesClient(client_wrapper=client_wrapper) - - self.exports = AsyncExportsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawProjectsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawProjectsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.pauses = AsyncPausesClient(client_wrapper=self._client_wrapper) + self.exports = AsyncExportsClient(client_wrapper=self._client_wrapper) async def list( self, @@ -648,7 +777,6 @@ async def main() -> None: response = await client.projects.list() async for item in response: yield item - # alternatively, you can paginate page-by-page async for page in response.iter_pages(): yield page @@ -656,15 +784,45 @@ async def main() -> None: asyncio.run(main()) """ - return await self._raw_client.list( - ordering=ordering, - ids=ids, - title=title, - page=page, - page_size=page_size, - workspaces=workspaces, + page = page if page is not None else 1 + _response = await self._client_wrapper.httpx_client.request( + "api/projects/", + method="GET", + params={ + "ordering": ordering, + "ids": ids, + "title": title, + "page": page, + "page_size": page_size, + "workspaces": workspaces, + }, request_options=request_options, ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + ProjectsListResponse, + parse_obj_as( + type_=ProjectsListResponse, # type: ignore + object_=_response.json(), + ), + ) + _has_next = True + _get_next = lambda: self.list( + ordering=ordering, + ids=ids, + title=title, + page=page + 1, + page_size=page_size, + workspaces=workspaces, + request_options=request_options, + ) + _items = _parsed_response.results + return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -769,25 +927,45 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - title=title, - description=description, - label_config=label_config, - expert_instruction=expert_instruction, - show_instruction=show_instruction, - show_skip_button=show_skip_button, - enable_empty_annotation=enable_empty_annotation, - show_annotation_history=show_annotation_history, - reveal_preannotations_interactively=reveal_preannotations_interactively, - show_collab_predictions=show_collab_predictions, - maximum_annotations=maximum_annotations, - color=color, - control_weights=control_weights, - workspace=workspace, - model_version=model_version, + _response = await self._client_wrapper.httpx_client.request( + "api/projects/", + method="POST", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectsCreateResponse, + parse_obj_as( + type_=ProjectsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, 
body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Project: """ @@ -825,8 +1003,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -866,8 +1060,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -982,26 +1186,45 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - title=title, - description=description, - label_config=label_config, - expert_instruction=expert_instruction, - show_instruction=show_instruction, - show_skip_button=show_skip_button, - enable_empty_annotation=enable_empty_annotation, - show_annotation_history=show_annotation_history, - 
reveal_preannotations_interactively=reveal_preannotations_interactively, - show_collab_predictions=show_collab_predictions, - maximum_annotations=maximum_annotations, - color=color, - control_weights=control_weights, - workspace=workspace, - model_version=model_version, + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectsUpdateResponse, + parse_obj_as( + type_=ProjectsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def import_tasks( self, @@ -1108,18 +1331,48 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.import_tasks( - id, - request=request, - commit_to_project=commit_to_project, - return_task_ids=return_task_ids, - preannotated_from_fields=preannotated_from_fields, + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/import", + method="POST", + params={ + 
"commit_to_project": commit_to_project, + "return_task_ids": return_task_ids, + "preannotated_from_fields": preannotated_from_fields, + }, + json=request, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectsImportTasksResponse, + parse_obj_as( + type_=ProjectsImportTasksResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def validate_config( - self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + *, + label_config: str, + request_options: typing.Optional[RequestOptions] = None, ) -> ProjectLabelConfig: """ @@ -1163,7 +1416,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.validate_config( - id, label_config=label_config, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/validate/", + method="POST", + json={ + "label_config": label_config, + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectLabelConfig, + parse_obj_as( + type_=ProjectLabelConfig, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git 
a/src/label_studio_sdk/projects/exports/__init__.py b/src/label_studio_sdk/projects/exports/__init__.py index 8366b6166..e251c825b 100644 --- a/src/label_studio_sdk/projects/exports/__init__.py +++ b/src/label_studio_sdk/projects/exports/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import ExportsConvertResponse, ExportsListFormatsResponseItem __all__ = ["ExportsConvertResponse", "ExportsListFormatsResponseItem"] diff --git a/src/label_studio_sdk/projects/exports/client.py b/src/label_studio_sdk/projects/exports/client.py index 1703c09ec..1dcfcc35d 100644 --- a/src/label_studio_sdk/projects/exports/client.py +++ b/src/label_studio_sdk/projects/exports/client.py @@ -1,22 +1,26 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions -from ...types.annotation_filter_options import AnnotationFilterOptions -from ...types.converted_format import ConvertedFormat +from ...core.jsonable_encoder import jsonable_encoder +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem +from ...core.pydantic_utilities import parse_obj_as from ...types.export import Export -from ...types.export_format import ExportFormat -from ...types.export_snapshot import ExportSnapshot +from ...types.user_simple import UserSimple +import datetime as dt from ...types.export_snapshot_status import ExportSnapshotStatus -from ...types.serialization_options import SerializationOptions +from ...types.converted_format import ConvertedFormat from ...types.task_filter_options import TaskFilterOptions -from ...types.user_simple import UserSimple -from .raw_client import AsyncRawExportsClient, 
RawExportsClient +from ...types.annotation_filter_options import AnnotationFilterOptions +from ...types.serialization_options import SerializationOptions +from ...types.export_snapshot import ExportSnapshot +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.export_format import ExportFormat from .types.exports_convert_response import ExportsConvertResponse -from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -24,18 +28,7 @@ class ExportsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawExportsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawExportsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawExportsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def download_sync( self, @@ -90,20 +83,33 @@ def download_sync( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Returns - ------- + Yields + ------ typing.Iterator[bytes] Exported data in binary format """ - with self._raw_client.download_sync( - id, - export_type=export_type, - download_all_tasks=download_all_tasks, - download_resources=download_resources, - ids=ids, + with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(id)}/export", + method="GET", + params={ + "export_type": export_type, + "download_all_tasks": download_all_tasks, + "download_resources": download_resources, + "ids": ids, + }, request_options=request_options, - ) as r: - yield from r.data + ) as _response: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + for _chunk in _response.iter_bytes(chunk_size=_chunk_size): + yield _chunk + return + _response.read() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def list_formats( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -138,10 +144,31 @@ def list_formats( id=1, ) """ - _response = self._raw_client.list_formats(id, request_options=request_options) - return _response.data - - def list(self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Export]: + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ExportsListFormatsResponseItem], + parse_obj_as( + type_=typing.List[ExportsListFormatsResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) + + def list( + self, + project_id: int, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[Export]: """ Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -172,8 +199,24 @@ def list(self, project_id: int, *, request_options: typing.Optional[RequestOptio project_id=1, ) """ - _response = self._raw_client.list(project_id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -251,23 +294,57 @@ def create( project_id=1, ) """ - _response = self._raw_client.create( - project_id, - title=title, - id=id, - created_by=created_by, - created_at=created_at, - finished_at=finished_at, - status=status, - md5=md5, - counters=counters, - converted_formats=converted_formats, - task_filter_options=task_filter_options, - annotation_filter_options=annotation_filter_options, - serialization_options=serialization_options, + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="POST", + json={ + "title": title, + "id": id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), + "created_at": 
created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, + annotation=typing.Sequence[ConvertedFormat], + direction="write", + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, + annotation=TaskFilterOptions, + direction="write", + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, + annotation=AnnotationFilterOptions, + direction="write", + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, + annotation=SerializationOptions, + direction="write", + ), + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ExportSnapshot, + parse_obj_as( + type_=ExportSnapshot, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def download( self, @@ -299,18 +376,37 @@ def download( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Returns - ------- + Yields + ------ typing.Iterator[bytes] Exported data in binary format """ - with self._raw_client.download( - project_id, export_pk, export_type=export_type, request_options=request_options - ) as r: - yield from r.data + with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", + method="GET", + params={ + "exportType": export_type, + }, + request_options=request_options, + ) as _response: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + for _chunk in _response.iter_bytes(chunk_size=_chunk_size): + yield _chunk + return + _response.read() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get( - self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + self, + project_id: int, + export_pk: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> Export: """ @@ -348,11 +444,31 @@ def get( export_pk="export_pk", ) """ - _response = self._raw_client.get(project_id, export_pk, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete( - self, project_id: int, export_pk: str, *, 
request_options: typing.Optional[RequestOptions] = None + self, + project_id: int, + export_pk: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ @@ -387,8 +503,18 @@ def delete( export_pk="export_pk", ) """ - _response = self._raw_client.delete(project_id, export_pk, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def convert( self, @@ -442,30 +568,37 @@ def convert( export_pk="export_pk", ) """ - _response = self._raw_client.convert( - project_id, - export_pk, - export_type=export_type, - download_resources=download_resources, + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", + method="POST", + json={ + "export_type": export_type, + "download_resources": download_resources, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ExportsConvertResponse, + parse_obj_as( + type_=ExportsConvertResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncExportsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawExportsClient(client_wrapper=client_wrapper) - - 
@property - def with_raw_response(self) -> AsyncRawExportsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawExportsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def download_sync( self, @@ -520,21 +653,33 @@ async def download_sync( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. - Returns - ------- + Yields + ------ typing.AsyncIterator[bytes] Exported data in binary format """ - async with self._raw_client.download_sync( - id, - export_type=export_type, - download_all_tasks=download_all_tasks, - download_resources=download_resources, - ids=ids, + async with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(id)}/export", + method="GET", + params={ + "export_type": export_type, + "download_all_tasks": download_all_tasks, + "download_resources": download_resources, + "ids": ids, + }, request_options=request_options, - ) as r: - async for _chunk in r.data: - yield _chunk + ) as _response: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size): + yield _chunk + return + await _response.aread() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def list_formats( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -577,11 +722,30 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list_formats(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + 
f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ExportsListFormatsResponseItem], + parse_obj_as( + type_=typing.List[ExportsListFormatsResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def list( - self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + project_id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[Export]: """ @@ -621,8 +785,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project_id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -708,23 +888,57 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - project_id, - title=title, - id=id, - created_by=created_by, - created_at=created_at, - finished_at=finished_at, - status=status, - md5=md5, - counters=counters, - converted_formats=converted_formats, - task_filter_options=task_filter_options, - annotation_filter_options=annotation_filter_options, - 
serialization_options=serialization_options, + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="POST", + json={ + "title": title, + "id": id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), + "created_at": created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, + annotation=typing.Sequence[ConvertedFormat], + direction="write", + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, + annotation=TaskFilterOptions, + direction="write", + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, + annotation=AnnotationFilterOptions, + direction="write", + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, + annotation=SerializationOptions, + direction="write", + ), + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ExportSnapshot, + parse_obj_as( + type_=ExportSnapshot, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def download( self, @@ -756,19 +970,37 @@ async def download( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Returns - ------- + Yields + ------ typing.AsyncIterator[bytes] Exported data in binary format """ - async with self._raw_client.download( - project_id, export_pk, export_type=export_type, request_options=request_options - ) as r: - async for _chunk in r.data: - yield _chunk + async with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", + method="GET", + params={ + "exportType": export_type, + }, + request_options=request_options, + ) as _response: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size): + yield _chunk + return + await _response.aread() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get( - self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + self, + project_id: int, + export_pk: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> Export: """ @@ -814,11 +1046,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(project_id, export_pk, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) async def delete( - self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + self, + project_id: int, + export_pk: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ @@ -861,8 +1113,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(project_id, export_pk, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def convert( self, @@ -924,11 +1186,29 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.convert( - project_id, - export_pk, - export_type=export_type, - download_resources=download_resources, + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", + method="POST", + json={ + "export_type": export_type, + "download_resources": download_resources, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ExportsConvertResponse, + parse_obj_as( + type_=ExportsConvertResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, 
body=_response_json) diff --git a/src/label_studio_sdk/projects/exports/raw_client.py b/src/label_studio_sdk/projects/exports/raw_client.py deleted file mode 100644 index e6982638e..000000000 --- a/src/label_studio_sdk/projects/exports/raw_client.py +++ /dev/null @@ -1,1038 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import contextlib -import datetime as dt -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...core.serialization import convert_and_respect_annotation_metadata -from ...types.annotation_filter_options import AnnotationFilterOptions -from ...types.converted_format import ConvertedFormat -from ...types.export import Export -from ...types.export_format import ExportFormat -from ...types.export_snapshot import ExportSnapshot -from ...types.export_snapshot_status import ExportSnapshotStatus -from ...types.serialization_options import SerializationOptions -from ...types.task_filter_options import TaskFilterOptions -from ...types.user_simple import UserSimple -from .types.exports_convert_response import ExportsConvertResponse -from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) 
- - -class RawExportsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - @contextlib.contextmanager - def download_sync( - self, - id: int, - *, - export_type: typing.Optional[str] = None, - download_all_tasks: typing.Optional[bool] = None, - download_resources: typing.Optional[bool] = None, - ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> typing.Iterator[HttpResponse[typing.Iterator[bytes]]]: - """ - - If you have a large project it's recommended to use export snapshots, this easy export endpoint might have timeouts. - Export annotated tasks as a file in a specific format. - For example, to export JSON annotations for a project to a file called `annotations.json`, - run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' - ``` - To export all tasks, including skipped tasks and others without annotations, run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' - ``` - To export specific tasks with IDs of 123 and 345, run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' - ``` - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. 
- - export_type : typing.Optional[str] - Selected export format (JSON by default) - - download_all_tasks : typing.Optional[bool] - - If true, download all tasks regardless of status. If false, download only annotated tasks. - - download_resources : typing.Optional[bool] - - If true, download all resource files such as images, audio, and others relevant to the tasks. - - ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] - - Specify a list of task IDs to retrieve only the details for those tasks. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. - - Returns - ------- - typing.Iterator[HttpResponse[typing.Iterator[bytes]]] - Exported data in binary format - """ - with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(id)}/export", - method="GET", - params={ - "export_type": export_type, - "download_all_tasks": download_all_tasks, - "download_resources": download_resources, - "ids": ids, - }, - request_options=request_options, - ) as _response: - - def _stream() -> HttpResponse[typing.Iterator[bytes]]: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - return HttpResponse( - response=_response, data=(_chunk for _chunk in _response.iter_bytes(chunk_size=_chunk_size)) - ) - _response.read() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError( - status_code=_response.status_code, headers=dict(_response.headers), body=_response.text - ) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - yield _stream() - - def list_formats( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[ExportsListFormatsResponseItem]]: - """ - - Before exporting annotations, you can check 
with formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[ExportsListFormatsResponseItem]] - Export formats - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ExportsListFormatsResponseItem], - parse_obj_as( - type_=typing.List[ExportsListFormatsResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def list( - self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[Export]]: - """ - - Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Included in the response is information about each snapshot, such as who created it and what format it is in. 
- - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[Export]] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Export], - parse_obj_as( - type_=typing.List[Export], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - project_id: int, - *, - title: typing.Optional[str] = OMIT, - id: typing.Optional[int] = OMIT, - created_by: typing.Optional[UserSimple] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - finished_at: typing.Optional[dt.datetime] = OMIT, - status: typing.Optional[ExportSnapshotStatus] = OMIT, - md5: typing.Optional[str] = OMIT, - counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, - task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, - annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, - serialization_options: typing.Optional[SerializationOptions] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ExportSnapshot]: - """ - - Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. - - For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - title : typing.Optional[str] - - id : typing.Optional[int] - - created_by : typing.Optional[UserSimple] - - created_at : typing.Optional[dt.datetime] - Creation time - - finished_at : typing.Optional[dt.datetime] - Complete or fail time - - status : typing.Optional[ExportSnapshotStatus] - - md5 : typing.Optional[str] - - counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - - converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] - - task_filter_options : typing.Optional[TaskFilterOptions] - - annotation_filter_options : typing.Optional[AnnotationFilterOptions] - - serialization_options : typing.Optional[SerializationOptions] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ExportSnapshot] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="POST", - json={ - "title": title, - "id": id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=UserSimple, direction="write" - ), - "created_at": created_at, - "finished_at": finished_at, - "status": status, - "md5": md5, - "counters": counters, - "converted_formats": convert_and_respect_annotation_metadata( - object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" - ), - "task_filter_options": convert_and_respect_annotation_metadata( - object_=task_filter_options, annotation=TaskFilterOptions, direction="write" - ), - "annotation_filter_options": convert_and_respect_annotation_metadata( - object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" - ), - "serialization_options": convert_and_respect_annotation_metadata( - object_=serialization_options, annotation=SerializationOptions, direction="write" - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ExportSnapshot, - parse_obj_as( - type_=ExportSnapshot, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - @contextlib.contextmanager - def download( - self, - project_id: int, - export_pk: str, - *, - export_type: typing.Optional[ExportFormat] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> 
typing.Iterator[HttpResponse[typing.Iterator[bytes]]]: - """ - - Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - - You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - export_type : typing.Optional[ExportFormat] - Selected export format. JSON is available by default. For other formats, you need to convert the export first. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- - Returns - ------- - typing.Iterator[HttpResponse[typing.Iterator[bytes]]] - Exported data in binary format - """ - with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", - method="GET", - params={ - "exportType": export_type, - }, - request_options=request_options, - ) as _response: - - def _stream() -> HttpResponse[typing.Iterator[bytes]]: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - return HttpResponse( - response=_response, data=(_chunk for _chunk in _response.iter_bytes(chunk_size=_chunk_size)) - ) - _response.read() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError( - status_code=_response.status_code, headers=dict(_response.headers), body=_response.text - ) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - yield _stream() - - def get( - self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[Export]: - """ - - Retrieve information about a specific export file (snapshot). - - You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). - - You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Export] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Export, - parse_obj_as( - type_=Export, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete( - self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[None]: - """ - - Delete an export file by specified export ID. - - You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def convert( - self, - project_id: int, - export_pk: str, - *, - export_type: typing.Optional[ExportFormat] = OMIT, - download_resources: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ExportsConvertResponse]: - """ - - You can use this to convert an export snapshot into the selected format. - - To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - - You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - export_type : typing.Optional[ExportFormat] - - download_resources : typing.Optional[bool] - If true, download all resource files such as images, audio, and others relevant to the tasks. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ExportsConvertResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", - method="POST", - json={ - "export_type": export_type, - "download_resources": download_resources, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ExportsConvertResponse, - parse_obj_as( - type_=ExportsConvertResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawExportsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - @contextlib.asynccontextmanager - async def download_sync( - self, - id: int, - *, - export_type: typing.Optional[str] = None, - download_all_tasks: typing.Optional[bool] = None, - download_resources: typing.Optional[bool] = None, - ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]: - """ - - If you have a large project it's recommended to use export snapshots, this easy export endpoint might have timeouts. - Export annotated tasks as a file in a specific format. 
- For example, to export JSON annotations for a project to a file called `annotations.json`, - run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' - ``` - To export all tasks, including skipped tasks and others without annotations, run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' - ``` - To export specific tasks with IDs of 123 and 345, run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' - ``` - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - export_type : typing.Optional[str] - Selected export format (JSON by default) - - download_all_tasks : typing.Optional[bool] - - If true, download all tasks regardless of status. If false, download only annotated tasks. - - download_resources : typing.Optional[bool] - - If true, download all resource files such as images, audio, and others relevant to the tasks. - - ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] - - Specify a list of task IDs to retrieve only the details for those tasks. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- - Returns - ------- - typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]] - Exported data in binary format - """ - async with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(id)}/export", - method="GET", - params={ - "export_type": export_type, - "download_all_tasks": download_all_tasks, - "download_resources": download_resources, - "ids": ids, - }, - request_options=request_options, - ) as _response: - - async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - return AsyncHttpResponse( - response=_response, - data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)), - ) - await _response.aread() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError( - status_code=_response.status_code, headers=dict(_response.headers), body=_response.text - ) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - yield await _stream() - - async def list_formats( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[ExportsListFormatsResponseItem]]: - """ - - Before exporting annotations, you can check with formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - - You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[ExportsListFormatsResponseItem]] - Export formats - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ExportsListFormatsResponseItem], - parse_obj_as( - type_=typing.List[ExportsListFormatsResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def list( - self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[Export]]: - """ - - Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Included in the response is information about each snapshot, such as who created it and what format it is in. - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[Export]] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Export], - parse_obj_as( - type_=typing.List[Export], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - project_id: int, - *, - title: typing.Optional[str] = OMIT, - id: typing.Optional[int] = OMIT, - created_by: typing.Optional[UserSimple] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - finished_at: typing.Optional[dt.datetime] = OMIT, - status: typing.Optional[ExportSnapshotStatus] = OMIT, - md5: typing.Optional[str] = OMIT, - counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, - task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, - annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, - serialization_options: typing.Optional[SerializationOptions] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ExportSnapshot]: - """ - - Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). 
- - A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. - - For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - title : typing.Optional[str] - - id : typing.Optional[int] - - created_by : typing.Optional[UserSimple] - - created_at : typing.Optional[dt.datetime] - Creation time - - finished_at : typing.Optional[dt.datetime] - Complete or fail time - - status : typing.Optional[ExportSnapshotStatus] - - md5 : typing.Optional[str] - - counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - - converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] - - task_filter_options : typing.Optional[TaskFilterOptions] - - annotation_filter_options : typing.Optional[AnnotationFilterOptions] - - serialization_options : typing.Optional[SerializationOptions] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ExportSnapshot] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="POST", - json={ - "title": title, - "id": id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=UserSimple, direction="write" - ), - "created_at": created_at, - "finished_at": finished_at, - "status": status, - "md5": md5, - "counters": counters, - "converted_formats": convert_and_respect_annotation_metadata( - object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" - ), - "task_filter_options": convert_and_respect_annotation_metadata( - object_=task_filter_options, annotation=TaskFilterOptions, direction="write" - ), - "annotation_filter_options": convert_and_respect_annotation_metadata( - object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" - ), - "serialization_options": convert_and_respect_annotation_metadata( - object_=serialization_options, annotation=SerializationOptions, direction="write" - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ExportSnapshot, - parse_obj_as( - type_=ExportSnapshot, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - @contextlib.asynccontextmanager - async def download( - self, - project_id: int, - export_pk: str, - *, - export_type: typing.Optional[ExportFormat] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> 
typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]: - """ - - Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - - You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - export_type : typing.Optional[ExportFormat] - Selected export format. JSON is available by default. For other formats, you need to convert the export first. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- - Returns - ------- - typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]] - Exported data in binary format - """ - async with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", - method="GET", - params={ - "exportType": export_type, - }, - request_options=request_options, - ) as _response: - - async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - return AsyncHttpResponse( - response=_response, - data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)), - ) - await _response.aread() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError( - status_code=_response.status_code, headers=dict(_response.headers), body=_response.text - ) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - yield await _stream() - - async def get( - self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Export]: - """ - - Retrieve information about a specific export file (snapshot). - - You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). - - You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Export] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Export, - parse_obj_as( - type_=Export, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete an export file by specified export ID. - - You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def convert( - self, - project_id: int, - export_pk: str, - *, - export_type: typing.Optional[ExportFormat] = OMIT, - download_resources: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ExportsConvertResponse]: - """ - - You can use this to convert an export snapshot into the selected format. - - To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). - - You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). - - Parameters - ---------- - project_id : int - A unique integer value identifying this project. - - export_pk : str - Primary key identifying the export file. - - export_type : typing.Optional[ExportFormat] - - download_resources : typing.Optional[bool] - If true, download all resource files such as images, audio, and others relevant to the tasks. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ExportsConvertResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", - method="POST", - json={ - "export_type": export_type, - "download_resources": download_resources, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ExportsConvertResponse, - parse_obj_as( - type_=ExportsConvertResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/exports/types/__init__.py b/src/label_studio_sdk/projects/exports/types/__init__.py index 45200ad5d..8ac8e7b26 100644 --- a/src/label_studio_sdk/projects/exports/types/__init__.py +++ b/src/label_studio_sdk/projects/exports/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .exports_convert_response import ExportsConvertResponse from .exports_list_formats_response_item import ExportsListFormatsResponseItem diff --git a/src/label_studio_sdk/projects/exports/types/exports_convert_response.py b/src/label_studio_sdk/projects/exports/types/exports_convert_response.py index 3003e1e7a..1543ef9b9 100644 --- a/src/label_studio_sdk/projects/exports/types/exports_convert_response.py +++ b/src/label_studio_sdk/projects/exports/types/exports_convert_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
+from ....core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....types.export_format import ExportFormat +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class ExportsConvertResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py b/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py index bbb67f01b..60bc39816 100644 --- a/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py +++ b/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....types.export_format import ExportFormat +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class ExportsListFormatsResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/pauses/__init__.py b/src/label_studio_sdk/projects/pauses/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/projects/pauses/__init__.py +++ b/src/label_studio_sdk/projects/pauses/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/projects/pauses/client.py b/src/label_studio_sdk/projects/pauses/client.py index 2c1961330..b0a303f43 100644 --- a/src/label_studio_sdk/projects/pauses/client.py +++ b/src/label_studio_sdk/projects/pauses/client.py @@ -1,11 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions from ...types.pause import Pause -from .raw_client import AsyncRawPausesClient, RawPausesClient +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -13,18 +16,7 @@ class PausesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawPausesClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawPausesClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawPausesClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( self, @@ -68,10 +60,27 @@ def list( user_pk=1, ) """ - _response = self._raw_client.list( - project_pk, user_pk, include_deleted=include_deleted, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="GET", + params={ + "include_deleted": include_deleted, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Pause], + parse_obj_as( + type_=typing.List[Pause], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -118,13 +127,40 @@ def create( reason="reason", ) """ - 
_response = self._raw_client.create( - project_pk, user_pk, reason=reason, verbose_reason=verbose_reason, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="POST", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + project_pk: int, + user_pk: int, + id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> Pause: """ Return detailed information about a specific pause. 
@@ -161,11 +197,32 @@ def get( id=1, ) """ - _response = self._raw_client.get(project_pk, user_pk, id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + project_pk: int, + user_pk: int, + id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ Remove a pause from the database. 
@@ -201,8 +258,18 @@ def delete( id=1, ) """ - _response = self._raw_client.delete(project_pk, user_pk, id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -254,26 +321,37 @@ def update( reason="reason", ) """ - _response = self._raw_client.update( - project_pk, user_pk, id, reason=reason, verbose_reason=verbose_reason, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="PATCH", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncPausesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawPausesClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawPausesClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawPausesClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( self, @@ -325,10 +403,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list( - project_pk, user_pk, include_deleted=include_deleted, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="GET", + params={ + "include_deleted": include_deleted, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Pause], + parse_obj_as( + type_=typing.List[Pause], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -383,13 +478,40 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - project_pk, user_pk, reason=reason, verbose_reason=verbose_reason, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="POST", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, 
body=_response_json) async def get( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + project_pk: int, + user_pk: int, + id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> Pause: """ Return detailed information about a specific pause. @@ -434,11 +556,32 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(project_pk, user_pk, id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + project_pk: int, + user_pk: int, + id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ Remove a pause from the database. 
@@ -482,8 +625,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(project_pk, user_pk, id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -543,7 +696,29 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - project_pk, user_pk, id, reason=reason, verbose_reason=verbose_reason, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="PATCH", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/projects/pauses/raw_client.py b/src/label_studio_sdk/projects/pauses/raw_client.py deleted file mode 100644 index d109e48d4..000000000 --- a/src/label_studio_sdk/projects/pauses/raw_client.py +++ /dev/null @@ -1,543 +0,0 @@ -# This file was auto-generated 
by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.pause import Pause - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawPausesClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, - project_pk: int, - user_pk: int, - *, - include_deleted: typing.Optional[bool] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[typing.List[Pause]]: - """ - Return a list of pause objects for the specified project and user. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - include_deleted : typing.Optional[bool] - Include deleted pauses - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[Pause]] - Successfully retrieved a list of pauses - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="GET", - params={ - "include_deleted": include_deleted, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Pause], - parse_obj_as( - type_=typing.List[Pause], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - project_pk: int, - user_pk: int, - *, - reason: str, - verbose_reason: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Pause]: - """ - Create a new pause object for the specified project and user. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - reason : str - - verbose_reason : typing.Optional[str] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Pause] - Successfully created a pause - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="POST", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[Pause]: - """ - Return detailed information about a specific pause. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - id : int - Pause ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Pause] - Successfully retrieved the pause - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[None]: - """ - Remove a pause from the database. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - id : int - Pause ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - project_pk: int, - user_pk: int, - id: int, - *, - reason: str, - verbose_reason: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Pause]: - """ - Partially update one or more fields of an existing pause. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - id : int - Pause ID - - reason : str - - verbose_reason : typing.Optional[str] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Pause] - Successfully updated the pause (partial) - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="PATCH", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawPausesClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, - project_pk: int, - user_pk: int, - *, - include_deleted: typing.Optional[bool] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[typing.List[Pause]]: - """ - Return a list of pause objects for the specified project and user. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - include_deleted : typing.Optional[bool] - Include deleted pauses - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[Pause]] - Successfully retrieved a list of pauses - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="GET", - params={ - "include_deleted": include_deleted, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Pause], - parse_obj_as( - type_=typing.List[Pause], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - project_pk: int, - user_pk: int, - *, - reason: str, - verbose_reason: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Pause]: - """ - Create a new pause object for the specified project and user. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - reason : str - - verbose_reason : typing.Optional[str] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Pause] - Successfully created a pause - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="POST", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Pause]: - """ - Return detailed information about a specific pause. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - id : int - Pause ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Pause] - Successfully retrieved the pause - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Remove a pause from the database. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - id : int - Pause ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - project_pk: int, - user_pk: int, - id: int, - *, - reason: str, - verbose_reason: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Pause]: - """ - Partially update one or more fields of an existing pause. - - Parameters - ---------- - project_pk : int - Project ID - - user_pk : int - User ID - - id : int - Pause ID - - reason : str - - verbose_reason : typing.Optional[str] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Pause] - Successfully updated the pause (partial) - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="PATCH", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/raw_client.py b/src/label_studio_sdk/projects/raw_client.py deleted file mode 100644 index 782602c5c..000000000 --- a/src/label_studio_sdk/projects/raw_client.py +++ /dev/null @@ -1,1245 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pagination import AsyncPager, BaseHttpResponse, SyncPager -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..errors.bad_request_error import BadRequestError -from ..types.project import Project -from ..types.project_label_config import ProjectLabelConfig -from .types.projects_create_response import ProjectsCreateResponse -from .types.projects_import_tasks_response import ProjectsImportTasksResponse -from .types.projects_list_response import ProjectsListResponse -from .types.projects_update_response import ProjectsUpdateResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawProjectsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, - *, - ordering: typing.Optional[str] = None, - ids: typing.Optional[str] = None, - title: typing.Optional[str] = None, - page: typing.Optional[int] = None, - page_size: typing.Optional[int] = None, - workspaces: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> SyncPager[Project]: - """ - - Return a list of the projects within your organization. - - To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. - - To retrieve a list of your Label Studio projects, update the following command to match your own environment. 
- Replace the domain name, port, and authorization token, then run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' - ``` - - Parameters - ---------- - ordering : typing.Optional[str] - Which field to use when ordering the results. - - ids : typing.Optional[str] - ids - - title : typing.Optional[str] - title - - page : typing.Optional[int] - A page number within the paginated result set. - - page_size : typing.Optional[int] - Number of results to return per page. - - workspaces : typing.Optional[int] - workspaces - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - SyncPager[Project] - - """ - page = page if page is not None else 1 - - _response = self._client_wrapper.httpx_client.request( - "api/projects/", - method="GET", - params={ - "ordering": ordering, - "ids": ids, - "title": title, - "page": page, - "page_size": page_size, - "workspaces": workspaces, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - ProjectsListResponse, - parse_obj_as( - type_=ProjectsListResponse, # type: ignore - object_=_response.json(), - ), - ) - _items = _parsed_response.results - _has_next = True - _get_next = lambda: self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - workspaces=workspaces, - request_options=request_options, - ) - return SyncPager( - has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - title: typing.Optional[str] = OMIT, - description: 
typing.Optional[str] = OMIT, - label_config: typing.Optional[str] = OMIT, - expert_instruction: typing.Optional[str] = OMIT, - show_instruction: typing.Optional[bool] = OMIT, - show_skip_button: typing.Optional[bool] = OMIT, - enable_empty_annotation: typing.Optional[bool] = OMIT, - show_annotation_history: typing.Optional[bool] = OMIT, - reveal_preannotations_interactively: typing.Optional[bool] = OMIT, - show_collab_predictions: typing.Optional[bool] = OMIT, - maximum_annotations: typing.Optional[int] = OMIT, - color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - workspace: typing.Optional[int] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ProjectsCreateResponse]: - """ - - Create a project and set up the labeling interface. For more information about setting up projects, see the following: - * [Create and configure projects](https://labelstud.io/guide/setup_project) - * [Configure labeling interface](https://labelstud.io/guide/setup) - * [Project settings](https://labelstud.io/guide/project_settings) - - ```bash - curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' - ``` - - Parameters - ---------- - title : typing.Optional[str] - Project title - - description : typing.Optional[str] - Project description - - label_config : typing.Optional[str] - Label config in XML format - - expert_instruction : typing.Optional[str] - Labeling instructions to show to the user - - show_instruction : typing.Optional[bool] - Show labeling instructions - - show_skip_button : typing.Optional[bool] - Show skip button - - enable_empty_annotation : typing.Optional[bool] - Allow empty annotations - - show_annotation_history : typing.Optional[bool] - Show annotation history - - reveal_preannotations_interactively : 
typing.Optional[bool] - Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest - - show_collab_predictions : typing.Optional[bool] - Show predictions to annotators - - maximum_annotations : typing.Optional[int] - Maximum annotations per task - - color : typing.Optional[str] - Project color in HEX format - - control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} - - workspace : typing.Optional[int] - Workspace ID - - model_version : typing.Optional[str] - Model version - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ProjectsCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/projects/", - method="POST", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectsCreateResponse, - parse_obj_as( - type_=ProjectsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Project]: - """ - Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Project] - Project information - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Project, - parse_obj_as( - type_=Project, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - label_config: typing.Optional[str] = OMIT, - expert_instruction: typing.Optional[str] = OMIT, - show_instruction: typing.Optional[bool] = OMIT, - show_skip_button: typing.Optional[bool] = OMIT, - enable_empty_annotation: typing.Optional[bool] = OMIT, - show_annotation_history: typing.Optional[bool] = OMIT, - reveal_preannotations_interactively: typing.Optional[bool] = OMIT, - show_collab_predictions: typing.Optional[bool] = OMIT, - maximum_annotations: typing.Optional[int] = OMIT, - color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - workspace: typing.Optional[int] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ProjectsUpdateResponse]: - """ - - Update the project settings for a specific project. For more information, see the following: - * [Create and configure projects](https://labelstud.io/guide/setup_project) - * [Configure labeling interface](https://labelstud.io/guide/setup) - * [Project settings](https://labelstud.io/guide/project_settings) - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). 
- - - If you are modifying the labeling config for project that has in-progress work, note the following: - * You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. - * If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. - - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - title : typing.Optional[str] - Project title - - description : typing.Optional[str] - Project description - - label_config : typing.Optional[str] - Label config in XML format - - expert_instruction : typing.Optional[str] - Labeling instructions to show to the user - - show_instruction : typing.Optional[bool] - Show labeling instructions - - show_skip_button : typing.Optional[bool] - Show skip button - - enable_empty_annotation : typing.Optional[bool] - Allow empty annotations - - show_annotation_history : typing.Optional[bool] - Show annotation history - - reveal_preannotations_interactively : typing.Optional[bool] - Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest - - show_collab_predictions : typing.Optional[bool] - Show predictions to annotators - - maximum_annotations : typing.Optional[int] - Maximum annotations per task - - color : typing.Optional[str] - Project color in HEX format - - control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} - - workspace : typing.Optional[int] - Workspace ID - - model_version : typing.Optional[str] - Model version - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[ProjectsUpdateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectsUpdateResponse, - parse_obj_as( - type_=ProjectsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def import_tasks( - self, - id: 
int, - *, - request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]], - commit_to_project: typing.Optional[bool] = None, - return_task_ids: typing.Optional[bool] = None, - preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[ProjectsImportTasksResponse]: - """ - - Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited at 250K tasks and 200 MB. - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - - Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. - - For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. - - - There are three possible ways to import tasks with this endpoint: - - #### 1\. **POST with data** - Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. - - Update this example to specify your authorization token and Label Studio instance host, then run the following from - the command line: - - ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' - ``` - - #### 2\. **POST with files** - Send tasks as files. You can attach multiple files with different names. 
- - - **JSON**: text files in JavaScript object notation format - - **CSV**: text files with tables in Comma Separated Values format - - **TSV**: text files with tables in Tab Separated Value format - - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only - - Update this example to specify your authorization token, Label Studio instance host, and file name and path, - then run the following from the command line: - - ```bash - curl -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' -F ‘file=@path/to/my_file.csv’ - ``` - - #### 3\. **POST with URL** - You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. - - ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' \ - --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' - ``` - -
- - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] - - commit_to_project : typing.Optional[bool] - Set to "true" to immediately commit tasks to the project. - - return_task_ids : typing.Optional[bool] - Set to "true" to return task IDs in the response. - - preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] - List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannoted_from_fields=["prediction"]`. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[ProjectsImportTasksResponse] - Tasks successfully imported - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/import", - method="POST", - params={ - "commit_to_project": commit_to_project, - "return_task_ids": return_task_ids, - "preannotated_from_fields": preannotated_from_fields, - }, - json=request, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectsImportTasksResponse, - parse_obj_as( - type_=ProjectsImportTasksResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - if _response.status_code == 400: - raise BadRequestError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def validate_config( - self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[ProjectLabelConfig]: - """ - - Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - label_config : str - Label config in XML format. See more about it in documentation - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ProjectLabelConfig] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/validate/", - method="POST", - json={ - "label_config": label_config, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectLabelConfig, - parse_obj_as( - type_=ProjectLabelConfig, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawProjectsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, - *, - ordering: typing.Optional[str] = None, - ids: typing.Optional[str] = None, - title: typing.Optional[str] = None, - page: typing.Optional[int] = None, - page_size: typing.Optional[int] = None, - workspaces: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncPager[Project]: - """ - - Return a list of the projects within your organization. - - To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. - - To retrieve a list of your Label Studio projects, update the following command to match your own environment. 
- Replace the domain name, port, and authorization token, then run the following from the command line: - ```bash - curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' - ``` - - Parameters - ---------- - ordering : typing.Optional[str] - Which field to use when ordering the results. - - ids : typing.Optional[str] - ids - - title : typing.Optional[str] - title - - page : typing.Optional[int] - A page number within the paginated result set. - - page_size : typing.Optional[int] - Number of results to return per page. - - workspaces : typing.Optional[int] - workspaces - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncPager[Project] - - """ - page = page if page is not None else 1 - - _response = await self._client_wrapper.httpx_client.request( - "api/projects/", - method="GET", - params={ - "ordering": ordering, - "ids": ids, - "title": title, - "page": page, - "page_size": page_size, - "workspaces": workspaces, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - ProjectsListResponse, - parse_obj_as( - type_=ProjectsListResponse, # type: ignore - object_=_response.json(), - ), - ) - _items = _parsed_response.results - _has_next = True - - async def _get_next(): - return await self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - workspaces=workspaces, - request_options=request_options, - ) - - return AsyncPager( - has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - title: typing.Optional[str] = 
OMIT, - description: typing.Optional[str] = OMIT, - label_config: typing.Optional[str] = OMIT, - expert_instruction: typing.Optional[str] = OMIT, - show_instruction: typing.Optional[bool] = OMIT, - show_skip_button: typing.Optional[bool] = OMIT, - enable_empty_annotation: typing.Optional[bool] = OMIT, - show_annotation_history: typing.Optional[bool] = OMIT, - reveal_preannotations_interactively: typing.Optional[bool] = OMIT, - show_collab_predictions: typing.Optional[bool] = OMIT, - maximum_annotations: typing.Optional[int] = OMIT, - color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - workspace: typing.Optional[int] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ProjectsCreateResponse]: - """ - - Create a project and set up the labeling interface. For more information about setting up projects, see the following: - * [Create and configure projects](https://labelstud.io/guide/setup_project) - * [Configure labeling interface](https://labelstud.io/guide/setup) - * [Project settings](https://labelstud.io/guide/project_settings) - - ```bash - curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' - ``` - - Parameters - ---------- - title : typing.Optional[str] - Project title - - description : typing.Optional[str] - Project description - - label_config : typing.Optional[str] - Label config in XML format - - expert_instruction : typing.Optional[str] - Labeling instructions to show to the user - - show_instruction : typing.Optional[bool] - Show labeling instructions - - show_skip_button : typing.Optional[bool] - Show skip button - - enable_empty_annotation : typing.Optional[bool] - Allow empty annotations - - show_annotation_history : typing.Optional[bool] - Show annotation history - - 
reveal_preannotations_interactively : typing.Optional[bool] - Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest - - show_collab_predictions : typing.Optional[bool] - Show predictions to annotators - - maximum_annotations : typing.Optional[int] - Maximum annotations per task - - color : typing.Optional[str] - Project color in HEX format - - control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} - - workspace : typing.Optional[int] - Workspace ID - - model_version : typing.Optional[str] - Model version - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ProjectsCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/projects/", - method="POST", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectsCreateResponse, - parse_obj_as( - type_=ProjectsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Project]: - """ - Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Project] - Project information - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Project, - parse_obj_as( - type_=Project, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - label_config: typing.Optional[str] = OMIT, - expert_instruction: typing.Optional[str] = OMIT, - show_instruction: typing.Optional[bool] = OMIT, - show_skip_button: typing.Optional[bool] = OMIT, - enable_empty_annotation: typing.Optional[bool] = OMIT, - show_annotation_history: typing.Optional[bool] = OMIT, - reveal_preannotations_interactively: typing.Optional[bool] = OMIT, - show_collab_predictions: typing.Optional[bool] = OMIT, - maximum_annotations: typing.Optional[int] = OMIT, - color: typing.Optional[str] = OMIT, - control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - workspace: typing.Optional[int] = OMIT, - model_version: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ProjectsUpdateResponse]: - """ - - Update the project settings for a specific project. 
For more information, see the following: - * [Create and configure projects](https://labelstud.io/guide/setup_project) - * [Configure labeling interface](https://labelstud.io/guide/setup) - * [Project settings](https://labelstud.io/guide/project_settings) - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). - - - If you are modifying the labeling config for project that has in-progress work, note the following: - * You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. - * If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. - - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - title : typing.Optional[str] - Project title - - description : typing.Optional[str] - Project description - - label_config : typing.Optional[str] - Label config in XML format - - expert_instruction : typing.Optional[str] - Labeling instructions to show to the user - - show_instruction : typing.Optional[bool] - Show labeling instructions - - show_skip_button : typing.Optional[bool] - Show skip button - - enable_empty_annotation : typing.Optional[bool] - Allow empty annotations - - show_annotation_history : typing.Optional[bool] - Show annotation history - - reveal_preannotations_interactively : typing.Optional[bool] - Reveal preannotations interactively. 
If set to True, predictions will be shown to the user only after selecting the area of interest - - show_collab_predictions : typing.Optional[bool] - Show predictions to annotators - - maximum_annotations : typing.Optional[int] - Maximum annotations per task - - color : typing.Optional[str] - Project color in HEX format - - control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} - - workspace : typing.Optional[int] - Workspace ID - - model_version : typing.Optional[str] - Model version - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ProjectsUpdateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectsUpdateResponse, - parse_obj_as( - type_=ProjectsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def import_tasks( - self, - id: int, - *, - request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]], - commit_to_project: typing.Optional[bool] = None, - return_task_ids: typing.Optional[bool] = None, - preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[ProjectsImportTasksResponse]: - """ - - Use this API endpoint to import labeling tasks in bulk. 
Note that each POST request is limited at 250K tasks and 200 MB. - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - - Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. - - For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. - - - There are three possible ways to import tasks with this endpoint: - - #### 1\. **POST with data** - Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. - - Update this example to specify your authorization token and Label Studio instance host, then run the following from - the command line: - - ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' - ``` - - #### 2\. **POST with files** - Send tasks as files. You can attach multiple files with different names. - - - **JSON**: text files in JavaScript object notation format - - **CSV**: text files with tables in Comma Separated Values format - - **TSV**: text files with tables in Tab Separated Value format - - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only - - Update this example to specify your authorization token, Label Studio instance host, and file name and path, - then run the following from the command line: - - ```bash - curl -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' -F ‘file=@path/to/my_file.csv’ - ``` - - #### 3\. **POST with URL** - You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. 
- - ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' \ - --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' - ``` - -
- - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] - - commit_to_project : typing.Optional[bool] - Set to "true" to immediately commit tasks to the project. - - return_task_ids : typing.Optional[bool] - Set to "true" to return task IDs in the response. - - preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] - List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannoted_from_fields=["prediction"]`. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[ProjectsImportTasksResponse] - Tasks successfully imported - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/import", - method="POST", - params={ - "commit_to_project": commit_to_project, - "return_task_ids": return_task_ids, - "preannotated_from_fields": preannotated_from_fields, - }, - json=request, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectsImportTasksResponse, - parse_obj_as( - type_=ProjectsImportTasksResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - if _response.status_code == 400: - raise BadRequestError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def validate_config( - self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[ProjectLabelConfig]: - """ - - Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - label_config : str - Label config in XML format. See more about it in documentation - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ProjectLabelConfig] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/validate/", - method="POST", - json={ - "label_config": label_config, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectLabelConfig, - parse_obj_as( - type_=ProjectLabelConfig, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/types/__init__.py b/src/label_studio_sdk/projects/types/__init__.py index 71e3306e4..8f2e80d13 100644 --- a/src/label_studio_sdk/projects/types/__init__.py +++ b/src/label_studio_sdk/projects/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .projects_create_response import ProjectsCreateResponse from .projects_import_tasks_response import ProjectsImportTasksResponse from .projects_list_response import ProjectsListResponse diff --git a/src/label_studio_sdk/projects/types/projects_create_response.py b/src/label_studio_sdk/projects/types/projects_create_response.py index f8f6a555c..b56d864a8 100644 --- a/src/label_studio_sdk/projects/types/projects_create_response.py +++ b/src/label_studio_sdk/projects/types/projects_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ProjectsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py index 24f31d790..66adc2d99 100644 --- a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py +++ b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ProjectsImportTasksResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_list_response.py b/src/label_studio_sdk/projects/types/projects_list_response.py index 98a5363a7..6d16d6b32 100644 --- a/src/label_studio_sdk/projects/types/projects_list_response.py +++ b/src/label_studio_sdk/projects/types/projects_list_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...types.project import Project +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic class ProjectsListResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_update_response.py b/src/label_studio_sdk/projects/types/projects_update_response.py index ae66d5688..5034b9c8b 100644 --- a/src/label_studio_sdk/projects/types/projects_update_response.py +++ b/src/label_studio_sdk/projects/types/projects_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ProjectsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/__init__.py b/src/label_studio_sdk/prompts/__init__.py index 7104d2f12..a9ec1fd8f 100644 --- a/src/label_studio_sdk/prompts/__init__.py +++ b/src/label_studio_sdk/prompts/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, PromptsBatchFailedPredictionsResponse, diff --git a/src/label_studio_sdk/prompts/client.py b/src/label_studio_sdk/prompts/client.py index 54cafd816..fe30d2947 100644 --- a/src/label_studio_sdk/prompts/client.py +++ b/src/label_studio_sdk/prompts/client.py @@ -1,24 +1,35 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper +from .versions.client import VersionsClient +from .runs.client import RunsClient +from .indicators.client import IndicatorsClient from ..core.request_options import RequestOptions from ..types.prompt import Prompt -from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from ..types.prompt_created_by import PromptCreatedBy +import datetime as dt from ..types.prompt_organization import PromptOrganization -from .indicators.client import AsyncIndicatorsClient, IndicatorsClient -from .raw_client import AsyncRawPromptsClient, RawPromptsClient -from .runs.client import AsyncRunsClient, RunsClient +from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.jsonable_encoder import jsonable_encoder +from .types.prompts_batch_predictions_request_results_item import ( + PromptsBatchPredictionsRequestResultsItem, +) +from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse from .types.prompts_batch_failed_predictions_request_failed_predictions_item import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, ) -from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse -from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem -from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse -from .versions.client import AsyncVersionsClient, VersionsClient +from .types.prompts_batch_failed_predictions_response import ( + PromptsBatchFailedPredictionsResponse, +) +from ..core.client_wrapper import 
AsyncClientWrapper +from .versions.client import AsyncVersionsClient +from .runs.client import AsyncRunsClient +from .indicators.client import AsyncIndicatorsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -26,23 +37,10 @@ class PromptsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawPromptsClient(client_wrapper=client_wrapper) - self.versions = VersionsClient(client_wrapper=client_wrapper) - - self.runs = RunsClient(client_wrapper=client_wrapper) - - self.indicators = IndicatorsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawPromptsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawPromptsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.versions = VersionsClient(client_wrapper=self._client_wrapper) + self.runs = RunsClient(client_wrapper=self._client_wrapper) + self.indicators = IndicatorsClient(client_wrapper=self._client_wrapper) def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: """ @@ -67,8 +65,24 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.prompts.list() """ - _response = self._raw_client.list(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -141,20 +155,47 @@ def create( 
output_classes=["output_classes"], ) """ - _response = self._raw_client.create( - title=title, - input_fields=input_fields, - output_classes=output_classes, - description=description, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - organization=organization, - associated_projects=associated_projects, - skill_name=skill_name, + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=PromptOrganization, + direction="write", + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt: """ @@ -184,8 +225,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + 
request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -214,8 +271,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -293,21 +360,47 @@ def update( output_classes=["output_classes"], ) """ - _response = self._raw_client.update( - id, - title=title, - input_fields=input_fields, - output_classes=output_classes, - description=description, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - organization=organization, - associated_projects=associated_projects, - skill_name=skill_name, + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + 
annotation=PromptOrganization, + direction="write", + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def batch_predictions( self, @@ -343,10 +436,36 @@ def batch_predictions( ) client.prompts.batch_predictions() """ - _response = self._raw_client.batch_predictions( - modelrun_id=modelrun_id, results=results, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "results": convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def batch_failed_predictions( self, @@ -384,31 +503,44 @@ def batch_failed_predictions( ) 
client.prompts.batch_failed_predictions() """ - _response = self._raw_client.batch_failed_predictions( - modelrun_id=modelrun_id, failed_predictions=failed_predictions, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + "api/model-run/batch-failed-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncPromptsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawPromptsClient(client_wrapper=client_wrapper) - self.versions = AsyncVersionsClient(client_wrapper=client_wrapper) - - self.runs = AsyncRunsClient(client_wrapper=client_wrapper) - - self.indicators = AsyncIndicatorsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawPromptsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawPromptsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.versions = AsyncVersionsClient(client_wrapper=self._client_wrapper) + self.runs = AsyncRunsClient(client_wrapper=self._client_wrapper) + self.indicators = AsyncIndicatorsClient(client_wrapper=self._client_wrapper) async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: """ @@ -441,8 +573,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -523,20 +671,47 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - title=title, - input_fields=input_fields, - output_classes=output_classes, - description=description, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - organization=organization, - associated_projects=associated_projects, - skill_name=skill_name, + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + 
annotation=PromptOrganization, + direction="write", + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt: """ @@ -574,8 +749,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -612,8 +803,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + 
request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -699,21 +900,47 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - title=title, - input_fields=input_fields, - output_classes=output_classes, - description=description, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - organization=organization, - associated_projects=associated_projects, - skill_name=skill_name, + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=PromptOrganization, + direction="write", + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def batch_predictions( self, @@ 
-757,10 +984,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.batch_predictions( - modelrun_id=modelrun_id, results=results, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "results": convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def batch_failed_predictions( self, @@ -806,7 +1059,33 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.batch_failed_predictions( - modelrun_id=modelrun_id, failed_predictions=failed_predictions, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + "api/model-run/batch-failed-predictions", + method="POST", + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + 
type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/prompts/indicators/__init__.py b/src/label_studio_sdk/prompts/indicators/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/prompts/indicators/__init__.py +++ b/src/label_studio_sdk/prompts/indicators/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/prompts/indicators/client.py b/src/label_studio_sdk/prompts/indicators/client.py index b3ec7bc01..58b7ac155 100644 --- a/src/label_studio_sdk/prompts/indicators/client.py +++ b/src/label_studio_sdk/prompts/indicators/client.py @@ -1,28 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.client_wrapper import SyncClientWrapper import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions -from ...types.key_indicator_value import KeyIndicatorValue from ...types.key_indicators import KeyIndicators -from .raw_client import AsyncRawIndicatorsClient, RawIndicatorsClient +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from ...types.key_indicator_value import KeyIndicatorValue +from ...core.client_wrapper import AsyncClientWrapper class IndicatorsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawIndicatorsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawIndicatorsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawIndicatorsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> KeyIndicators: """ @@ -52,11 +44,31 @@ def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = No pk=1, ) """ - _response = self._raw_client.list(pk, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get( - self, 
indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None + self, + indicator_key: str, + pk: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> KeyIndicatorValue: """ Get a specific key indicator for the Prompt dashboard. @@ -89,24 +101,29 @@ def get( pk=1, ) """ - _response = self._raw_client.get(indicator_key, pk, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncIndicatorsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawIndicatorsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawIndicatorsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawIndicatorsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> KeyIndicators: """ @@ -144,11 +161,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(pk, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get( - self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None + self, + indicator_key: str, + pk: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> KeyIndicatorValue: """ Get a specific key indicator for the Prompt dashboard. 
@@ -189,5 +226,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(indicator_key, pk, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/prompts/indicators/raw_client.py b/src/label_studio_sdk/prompts/indicators/raw_client.py deleted file mode 100644 index 43bdb6b8e..000000000 --- a/src/label_studio_sdk/prompts/indicators/raw_client.py +++ /dev/null @@ -1,183 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...types.key_indicator_value import KeyIndicatorValue -from ...types.key_indicators import KeyIndicators - - -class RawIndicatorsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[KeyIndicators]: - """ - Get key indicators for the Prompt dashboard. 
- - Parameters - ---------- - pk : int - Inference run ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[KeyIndicators] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - KeyIndicators, - parse_obj_as( - type_=KeyIndicators, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[KeyIndicatorValue]: - """ - Get a specific key indicator for the Prompt dashboard. - - Parameters - ---------- - indicator_key : str - Key of the indicator - - pk : int - Inference run ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[KeyIndicatorValue] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - KeyIndicatorValue, - parse_obj_as( - type_=KeyIndicatorValue, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawIndicatorsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, pk: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[KeyIndicators]: - """ - Get key indicators for the Prompt dashboard. - - Parameters - ---------- - pk : int - Inference run ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[KeyIndicators] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - KeyIndicators, - parse_obj_as( - type_=KeyIndicators, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[KeyIndicatorValue]: - """ - Get a specific key indicator for the Prompt dashboard. - - Parameters - ---------- - indicator_key : str - Key of the indicator - - pk : int - Inference run ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[KeyIndicatorValue] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - KeyIndicatorValue, - parse_obj_as( - type_=KeyIndicatorValue, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/prompts/raw_client.py b/src/label_studio_sdk/prompts/raw_client.py deleted file mode 100644 index 371eb1317..000000000 --- a/src/label_studio_sdk/prompts/raw_client.py +++ /dev/null @@ -1,890 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..core.serialization import convert_and_respect_annotation_metadata -from ..types.prompt import Prompt -from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem -from ..types.prompt_created_by import PromptCreatedBy -from ..types.prompt_organization import PromptOrganization -from .types.prompts_batch_failed_predictions_request_failed_predictions_item import ( - PromptsBatchFailedPredictionsRequestFailedPredictionsItem, -) -from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse -from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem -from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawPromptsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[Prompt]]: - """ - Get a list of prompts. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[Prompt]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/prompts/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Prompt], - parse_obj_as( - type_=typing.List[Prompt], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - title: str, - input_fields: typing.Sequence[str], - output_classes: typing.Sequence[str], - description: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptOrganization] = OMIT, - associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, - skill_name: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Prompt]: - """ - Create a new prompt. 
- - Parameters - ---------- - title : str - Title of the prompt - - input_fields : typing.Sequence[str] - List of input fields - - output_classes : typing.Sequence[str] - List of output classes - - description : typing.Optional[str] - Description of the prompt - - created_by : typing.Optional[PromptCreatedBy] - User ID of the creator of the prompt - - created_at : typing.Optional[dt.datetime] - Date and time the prompt was created - - updated_at : typing.Optional[dt.datetime] - Date and time the prompt was last updated - - organization : typing.Optional[PromptOrganization] - Organization ID of the prompt - - associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] - List of associated projects IDs or objects - - skill_name : typing.Optional[str] - Name of the skill - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Prompt] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/prompts/", - method="POST", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - return 
HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Prompt]: - """ - Get a prompt by ID. - - Parameters - ---------- - id : int - Prompt ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Prompt] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - Delete a prompt by ID. - - Parameters - ---------- - id : int - Prompt ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - title: str, - input_fields: typing.Sequence[str], - output_classes: typing.Sequence[str], - description: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptOrganization] = OMIT, - associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, - skill_name: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Prompt]: - """ - Update a prompt by ID. 
- - Parameters - ---------- - id : int - Prompt ID - - title : str - Title of the prompt - - input_fields : typing.Sequence[str] - List of input fields - - output_classes : typing.Sequence[str] - List of output classes - - description : typing.Optional[str] - Description of the prompt - - created_by : typing.Optional[PromptCreatedBy] - User ID of the creator of the prompt - - created_at : typing.Optional[dt.datetime] - Date and time the prompt was created - - updated_at : typing.Optional[dt.datetime] - Date and time the prompt was last updated - - organization : typing.Optional[PromptOrganization] - Organization ID of the prompt - - associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] - List of associated projects IDs or objects - - skill_name : typing.Optional[str] - Name of the skill - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Prompt] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - 
object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def batch_predictions( - self, - *, - modelrun_id: typing.Optional[int] = OMIT, - results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[PromptsBatchPredictionsResponse]: - """ - Create a new batch prediction. - - Parameters - ---------- - modelrun_id : typing.Optional[int] - Model Run ID to associate the prediction with - - results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[PromptsBatchPredictionsResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/model-run/batch-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "results": convert_and_respect_annotation_metadata( - object_=results, - annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptsBatchPredictionsResponse, - parse_obj_as( - type_=PromptsBatchPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) - - def batch_failed_predictions( - self, - *, - modelrun_id: typing.Optional[int] = OMIT, - failed_predictions: typing.Optional[ - typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] - ] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[PromptsBatchFailedPredictionsResponse]: - """ - Create a new batch of failed predictions. - - Parameters - ---------- - modelrun_id : typing.Optional[int] - Model Run ID where the failed predictions came from - - failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[PromptsBatchFailedPredictionsResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/model-run/batch-failed-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "failed_predictions": convert_and_respect_annotation_metadata( - object_=failed_predictions, - annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptsBatchFailedPredictionsResponse, - parse_obj_as( - type_=PromptsBatchFailedPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawPromptsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - 
self._client_wrapper = client_wrapper - - async def list( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[Prompt]]: - """ - Get a list of prompts. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[Prompt]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/prompts/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Prompt], - parse_obj_as( - type_=typing.List[Prompt], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - title: str, - input_fields: typing.Sequence[str], - output_classes: typing.Sequence[str], - description: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptOrganization] = OMIT, - associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, - skill_name: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Prompt]: - """ - Create a new prompt. 
- - Parameters - ---------- - title : str - Title of the prompt - - input_fields : typing.Sequence[str] - List of input fields - - output_classes : typing.Sequence[str] - List of output classes - - description : typing.Optional[str] - Description of the prompt - - created_by : typing.Optional[PromptCreatedBy] - User ID of the creator of the prompt - - created_at : typing.Optional[dt.datetime] - Date and time the prompt was created - - updated_at : typing.Optional[dt.datetime] - Date and time the prompt was last updated - - organization : typing.Optional[PromptOrganization] - Organization ID of the prompt - - associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] - List of associated projects IDs or objects - - skill_name : typing.Optional[str] - Name of the skill - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[Prompt] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/prompts/", - method="POST", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - return 
AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Prompt]: - """ - Get a prompt by ID. - - Parameters - ---------- - id : int - Prompt ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[Prompt] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Delete a prompt by ID. - - Parameters - ---------- - id : int - Prompt ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - title: str, - input_fields: typing.Sequence[str], - output_classes: typing.Sequence[str], - description: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptOrganization] = OMIT, - associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, - skill_name: typing.Optional[str] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Prompt]: - """ - Update a prompt by ID. 
- - Parameters - ---------- - id : int - Prompt ID - - title : str - Title of the prompt - - input_fields : typing.Sequence[str] - List of input fields - - output_classes : typing.Sequence[str] - List of output classes - - description : typing.Optional[str] - Description of the prompt - - created_by : typing.Optional[PromptCreatedBy] - User ID of the creator of the prompt - - created_at : typing.Optional[dt.datetime] - Date and time the prompt was created - - updated_at : typing.Optional[dt.datetime] - Date and time the prompt was last updated - - organization : typing.Optional[PromptOrganization] - Organization ID of the prompt - - associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] - List of associated projects IDs or objects - - skill_name : typing.Optional[str] - Name of the skill - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[Prompt] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptOrganization, direction="write" - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - 
object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def batch_predictions( - self, - *, - modelrun_id: typing.Optional[int] = OMIT, - results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[PromptsBatchPredictionsResponse]: - """ - Create a new batch prediction. - - Parameters - ---------- - modelrun_id : typing.Optional[int] - Model Run ID to associate the prediction with - - results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[PromptsBatchPredictionsResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-run/batch-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "results": convert_and_respect_annotation_metadata( - object_=results, - annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptsBatchPredictionsResponse, - parse_obj_as( - type_=PromptsBatchPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise 
ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def batch_failed_predictions( - self, - *, - modelrun_id: typing.Optional[int] = OMIT, - failed_predictions: typing.Optional[ - typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] - ] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[PromptsBatchFailedPredictionsResponse]: - """ - Create a new batch of failed predictions. - - Parameters - ---------- - modelrun_id : typing.Optional[int] - Model Run ID where the failed predictions came from - - failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[PromptsBatchFailedPredictionsResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-run/batch-failed-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "failed_predictions": convert_and_respect_annotation_metadata( - object_=failed_predictions, - annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptsBatchFailedPredictionsResponse, - parse_obj_as( - type_=PromptsBatchFailedPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git 
a/src/label_studio_sdk/prompts/runs/__init__.py b/src/label_studio_sdk/prompts/runs/__init__.py index 549df0e78..b92e5ed7e 100644 --- a/src/label_studio_sdk/prompts/runs/__init__.py +++ b/src/label_studio_sdk/prompts/runs/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import RunsListRequestProjectSubset __all__ = ["RunsListRequestProjectSubset"] diff --git a/src/label_studio_sdk/prompts/runs/client.py b/src/label_studio_sdk/prompts/runs/client.py index 4adb7cf23..f0edce47e 100644 --- a/src/label_studio_sdk/prompts/runs/client.py +++ b/src/label_studio_sdk/prompts/runs/client.py @@ -1,17 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper +from .types.runs_list_request_project_subset import RunsListRequestProjectSubset from ...core.request_options import RequestOptions from ...types.inference_run import InferenceRun -from ...types.inference_run_created_by import InferenceRunCreatedBy -from ...types.inference_run_organization import InferenceRunOrganization +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError from ...types.inference_run_project_subset import InferenceRunProjectSubset +from ...types.inference_run_organization import InferenceRunOrganization +from ...types.inference_run_created_by import InferenceRunCreatedBy from ...types.inference_run_status import InferenceRunStatus -from .raw_client import AsyncRawRunsClient, RawRunsClient -from .types.runs_list_request_project_subset import RunsListRequestProjectSubset +import datetime as dt +from ...core.serialization import convert_and_respect_annotation_metadata +from ...core.client_wrapper import 
AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -19,18 +23,7 @@ class RunsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawRunsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawRunsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawRunsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( self, @@ -80,10 +73,28 @@ def list( project_subset="All", ) """ - _response = self._raw_client.list( - id, version_id, project=project, project_subset=project_subset, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="GET", + params={ + "project": project, + "project_subset": project_subset, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -158,39 +169,51 @@ def create( project_subset="All", ) """ - _response = self._raw_client.create( - id, - version_id, - project=project, - project_subset=project_subset, - organization=organization, - model_version=model_version, - created_by=created_by, - status=status, - job_id=job_id, - created_at=created_at, - triggered_at=triggered_at, - predictions_updated_at=predictions_updated_at, - completed_at=completed_at, + _response = self._client_wrapper.httpx_client.request( + 
f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=InferenceRunOrganization, + direction="write", + ), + "project": project, + "model_version": model_version, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=InferenceRunCreatedBy, + direction="write", + ), + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncRunsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawRunsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawRunsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawRunsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( self, @@ -248,10 +271,28 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list( - id, version_id, project=project, project_subset=project_subset, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="GET", + params={ + "project": project, + "project_subset": project_subset, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -334,20 +375,43 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - id, - version_id, - project=project, - project_subset=project_subset, - organization=organization, - model_version=model_version, - created_by=created_by, - status=status, - job_id=job_id, - created_at=created_at, - triggered_at=triggered_at, - predictions_updated_at=predictions_updated_at, - completed_at=completed_at, + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=InferenceRunOrganization, + direction="write", + ), + "project": project, + "model_version": model_version, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + 
annotation=InferenceRunCreatedBy, + direction="write", + ), + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/prompts/runs/raw_client.py b/src/label_studio_sdk/prompts/runs/raw_client.py deleted file mode 100644 index e2c694d33..000000000 --- a/src/label_studio_sdk/prompts/runs/raw_client.py +++ /dev/null @@ -1,348 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...core.serialization import convert_and_respect_annotation_metadata -from ...types.inference_run import InferenceRun -from ...types.inference_run_created_by import InferenceRunCreatedBy -from ...types.inference_run_organization import InferenceRunOrganization -from ...types.inference_run_project_subset import InferenceRunProjectSubset -from ...types.inference_run_status import InferenceRunStatus -from .types.runs_list_request_project_subset import RunsListRequestProjectSubset - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawRunsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, - id: int, - version_id: int, - *, - project: int, - project_subset: RunsListRequestProjectSubset, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[InferenceRun]: - """ - Get information (status, etadata, etc) about an existing inference run - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - project : int - The ID of the project that this Interence Run makes predictions on - - project_subset : RunsListRequestProjectSubset - Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[InferenceRun] - Success - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="GET", - params={ - "project": project, - "project_subset": project_subset, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - id: int, - version_id: int, - *, - project: int, - project_subset: InferenceRunProjectSubset, - organization: typing.Optional[InferenceRunOrganization] = OMIT, - model_version: typing.Optional[int] = OMIT, - created_by: typing.Optional[InferenceRunCreatedBy] = OMIT, - status: typing.Optional[InferenceRunStatus] = OMIT, - job_id: typing.Optional[str] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - triggered_at: typing.Optional[dt.datetime] = OMIT, - predictions_updated_at: typing.Optional[dt.datetime] = OMIT, - completed_at: typing.Optional[dt.datetime] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[InferenceRun]: - """ - Run a prompt inference. 
- - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - project : int - - project_subset : InferenceRunProjectSubset - - organization : typing.Optional[InferenceRunOrganization] - - model_version : typing.Optional[int] - - created_by : typing.Optional[InferenceRunCreatedBy] - - status : typing.Optional[InferenceRunStatus] - - job_id : typing.Optional[str] - - created_at : typing.Optional[dt.datetime] - - triggered_at : typing.Optional[dt.datetime] - - predictions_updated_at : typing.Optional[dt.datetime] - - completed_at : typing.Optional[dt.datetime] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[InferenceRun] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="POST", - json={ - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=InferenceRunOrganization, direction="write" - ), - "project": project, - "model_version": model_version, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=InferenceRunCreatedBy, direction="write" - ), - "project_subset": project_subset, - "status": status, - "job_id": job_id, - "created_at": created_at, - "triggered_at": triggered_at, - "predictions_updated_at": predictions_updated_at, - "completed_at": completed_at, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise 
ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawRunsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, - id: int, - version_id: int, - *, - project: int, - project_subset: RunsListRequestProjectSubset, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[InferenceRun]: - """ - Get information (status, etadata, etc) about an existing inference run - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - project : int - The ID of the project that this Interence Run makes predictions on - - project_subset : RunsListRequestProjectSubset - Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records) - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[InferenceRun] - Success - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="GET", - params={ - "project": project, - "project_subset": project_subset, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - id: int, - version_id: int, - *, - project: int, - project_subset: InferenceRunProjectSubset, - 
organization: typing.Optional[InferenceRunOrganization] = OMIT, - model_version: typing.Optional[int] = OMIT, - created_by: typing.Optional[InferenceRunCreatedBy] = OMIT, - status: typing.Optional[InferenceRunStatus] = OMIT, - job_id: typing.Optional[str] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - triggered_at: typing.Optional[dt.datetime] = OMIT, - predictions_updated_at: typing.Optional[dt.datetime] = OMIT, - completed_at: typing.Optional[dt.datetime] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[InferenceRun]: - """ - Run a prompt inference. - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - project : int - - project_subset : InferenceRunProjectSubset - - organization : typing.Optional[InferenceRunOrganization] - - model_version : typing.Optional[int] - - created_by : typing.Optional[InferenceRunCreatedBy] - - status : typing.Optional[InferenceRunStatus] - - job_id : typing.Optional[str] - - created_at : typing.Optional[dt.datetime] - - triggered_at : typing.Optional[dt.datetime] - - predictions_updated_at : typing.Optional[dt.datetime] - - completed_at : typing.Optional[dt.datetime] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[InferenceRun] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="POST", - json={ - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=InferenceRunOrganization, direction="write" - ), - "project": project, - "model_version": model_version, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=InferenceRunCreatedBy, direction="write" - ), - "project_subset": project_subset, - "status": status, - "job_id": job_id, - "created_at": created_at, - "triggered_at": triggered_at, - "predictions_updated_at": predictions_updated_at, - "completed_at": completed_at, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/prompts/runs/types/__init__.py b/src/label_studio_sdk/prompts/runs/types/__init__.py index 8b1f7f214..81dbca787 100644 --- a/src/label_studio_sdk/prompts/runs/types/__init__.py +++ b/src/label_studio_sdk/prompts/runs/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .runs_list_request_project_subset import RunsListRequestProjectSubset __all__ = ["RunsListRequestProjectSubset"] diff --git a/src/label_studio_sdk/prompts/types/__init__.py b/src/label_studio_sdk/prompts/types/__init__.py index cd753f61e..aa63b5ae5 100644 --- a/src/label_studio_sdk/prompts/types/__init__.py +++ b/src/label_studio_sdk/prompts/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .prompts_batch_failed_predictions_request_failed_predictions_item import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, ) diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py index 87c9cbf41..f58cf15b8 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class PromptsBatchFailedPredictionsRequestFailedPredictionsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py index ad4d5a758..210085456 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - +from ...core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchFailedPredictionsResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py index 43c268c94..d46f78c4d 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class PromptsBatchPredictionsRequestResultsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py index 30463c2be..befabdace 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - +from ...core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchPredictionsResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/versions/__init__.py b/src/label_studio_sdk/prompts/versions/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/prompts/versions/__init__.py +++ b/src/label_studio_sdk/prompts/versions/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/prompts/versions/client.py b/src/label_studio_sdk/prompts/versions/client.py index 7baf50d7c..a5b1cd3a4 100644 --- a/src/label_studio_sdk/prompts/versions/client.py +++ b/src/label_studio_sdk/prompts/versions/client.py @@ -1,17 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions -from ...types.inference_run_cost_estimate import InferenceRunCostEstimate from ...types.prompt_version import PromptVersion +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from ...types.prompt_version_provider import PromptVersionProvider from ...types.prompt_version_created_by import PromptVersionCreatedBy +import datetime as dt from ...types.prompt_version_organization import PromptVersionOrganization -from ...types.prompt_version_provider import PromptVersionProvider +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.inference_run_cost_estimate import InferenceRunCostEstimate from 
...types.refined_prompt_response import RefinedPromptResponse -from .raw_client import AsyncRawVersionsClient, RawVersionsClient +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -19,18 +23,7 @@ class VersionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawVersionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawVersionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawVersionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[PromptVersion]: """ @@ -60,8 +53,24 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._raw_client.list(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -126,24 +135,52 @@ def create( id=1, ) """ - _response = self._raw_client.create( - id, - title=title, - parent_model=parent_model, - model_provider_connection=model_provider_connection, - prompt=prompt, - provider=provider, - provider_model_id=provider_model_id, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - 
organization=organization, + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=PromptVersionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=PromptVersionOrganization, + direction="write", + ), + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get( - self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + version_id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> PromptVersion: """ Get a prompt version by ID. 
@@ -176,10 +213,32 @@ def get( version_id=1, ) """ - _response = self._raw_client.get(id, version_id, request_options=request_options) - return _response.data - - def delete(self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete( + self, + id: int, + version_id: int, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: """ Delete a prompt version by ID. 
@@ -210,8 +269,18 @@ def delete(self, id: int, version_id: int, *, request_options: typing.Optional[R version_id=1, ) """ - _response = self._raw_client.delete(id, version_id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -281,22 +350,45 @@ def update( version_id=1, ) """ - _response = self._raw_client.update( - id, - version_id, - title=title, - parent_model=parent_model, - model_provider_connection=model_provider_connection, - prompt=prompt, - provider=provider, - provider_model_id=provider_model_id, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - organization=organization, + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="PATCH", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=PromptVersionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=PromptVersionOrganization, + direction="write", + ), + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptVersion, + parse_obj_as( + 
type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def cost_estimate( self, @@ -346,10 +438,28 @@ def cost_estimate( project_subset=1, ) """ - _response = self._raw_client.cost_estimate( - prompt_id, version_id, project_id=project_id, project_subset=project_subset, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", + method="POST", + params={ + "project_id": project_id, + "project_subset": project_subset, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get_refined_prompt( self, @@ -394,10 +504,27 @@ def get_refined_prompt( refinement_job_id="refinement_job_id", ) """ - _response = self._raw_client.get_refined_prompt( - prompt_id, version_id, refinement_job_id=refinement_job_id, request_options=request_options + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="GET", + params={ + "refinement_job_id": refinement_job_id, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) 
+ _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def refine_prompt( self, @@ -453,32 +580,41 @@ def refine_prompt( version_id=1, ) """ - _response = self._raw_client.refine_prompt( - prompt_id, - version_id, - async_=async_, - teacher_model_provider_connection_id=teacher_model_provider_connection_id, - teacher_model_name=teacher_model_name, - project_id=project_id, + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="POST", + params={ + "async": async_, + }, + json={ + "teacher_model_provider_connection_id": teacher_model_provider_connection_id, + "teacher_model_name": teacher_model_name, + "project_id": project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncVersionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawVersionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawVersionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawVersionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -518,8 +654,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -592,24 +744,52 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - id, - title=title, - parent_model=parent_model, - model_provider_connection=model_provider_connection, - prompt=prompt, - provider=provider, - provider_model_id=provider_model_id, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - organization=organization, + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=PromptVersionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + 
object_=organization, + annotation=PromptVersionOrganization, + direction="write", + ), + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get( - self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + version_id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> PromptVersion: """ Get a prompt version by ID. @@ -650,11 +830,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, version_id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete( - self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + version_id: int, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ Delete a prompt version by ID. 
@@ -694,8 +894,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, version_id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -773,22 +983,45 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - version_id, - title=title, - parent_model=parent_model, - model_provider_connection=model_provider_connection, - prompt=prompt, - provider=provider, - provider_model_id=provider_model_id, - created_by=created_by, - created_at=created_at, - updated_at=updated_at, - organization=organization, + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="PATCH", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, + annotation=PromptVersionCreatedBy, + direction="write", + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, + annotation=PromptVersionOrganization, + direction="write", + ), + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + PromptVersion, + parse_obj_as( + 
type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def cost_estimate( self, @@ -846,10 +1079,28 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.cost_estimate( - prompt_id, version_id, project_id=project_id, project_subset=project_subset, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", + method="POST", + params={ + "project_id": project_id, + "project_subset": project_subset, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get_refined_prompt( self, @@ -902,10 +1153,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get_refined_prompt( - prompt_id, version_id, refinement_job_id=refinement_job_id, request_options=request_options + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="GET", + params={ + "refinement_job_id": refinement_job_id, + }, + request_options=request_options, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + 
object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def refine_prompt( self, @@ -969,13 +1237,33 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.refine_prompt( - prompt_id, - version_id, - async_=async_, - teacher_model_provider_connection_id=teacher_model_provider_connection_id, - teacher_model_name=teacher_model_name, - project_id=project_id, + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="POST", + params={ + "async": async_, + }, + json={ + "teacher_model_provider_connection_id": teacher_model_provider_connection_id, + "teacher_model_name": teacher_model_name, + "project_id": project_id, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/prompts/versions/raw_client.py b/src/label_studio_sdk/prompts/versions/raw_client.py deleted file mode 100644 index 2892a6e79..000000000 --- a/src/label_studio_sdk/prompts/versions/raw_client.py +++ /dev/null @@ -1,1008 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from ...core.serialization import convert_and_respect_annotation_metadata -from ...types.inference_run_cost_estimate import InferenceRunCostEstimate -from ...types.prompt_version import PromptVersion -from ...types.prompt_version_created_by import PromptVersionCreatedBy -from ...types.prompt_version_organization import PromptVersionOrganization -from ...types.prompt_version_provider import PromptVersionProvider -from ...types.refined_prompt_response import RefinedPromptResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawVersionsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[PromptVersion]]: - """ - Get a list of prompt versions. - - Parameters - ---------- - id : int - Prompt ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[PromptVersion]] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[PromptVersion], - parse_obj_as( - type_=typing.List[PromptVersion], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - parent_model: typing.Optional[int] = OMIT, - model_provider_connection: typing.Optional[int] = OMIT, - prompt: typing.Optional[str] = OMIT, - provider: typing.Optional[PromptVersionProvider] = OMIT, - provider_model_id: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptVersionOrganization] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[PromptVersion]: - """ - Create a new version of a prompt. 
- - Parameters - ---------- - id : int - Prompt ID - - title : typing.Optional[str] - - parent_model : typing.Optional[int] - - model_provider_connection : typing.Optional[int] - - prompt : typing.Optional[str] - - provider : typing.Optional[PromptVersionProvider] - - provider_model_id : typing.Optional[str] - - created_by : typing.Optional[PromptVersionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - organization : typing.Optional[PromptVersionOrganization] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[PromptVersion] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="POST", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: int, version_id: int, 
*, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[PromptVersion]: - """ - Get a prompt version by ID. - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[PromptVersion] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete( - self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[None]: - """ - Delete a prompt version by ID. - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - version_id: int, - *, - title: typing.Optional[str] = OMIT, - parent_model: typing.Optional[int] = OMIT, - model_provider_connection: typing.Optional[int] = OMIT, - prompt: typing.Optional[str] = OMIT, - provider: typing.Optional[PromptVersionProvider] = OMIT, - provider_model_id: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptVersionOrganization] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[PromptVersion]: - """ - Update a prompt version by ID. - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - title : typing.Optional[str] - - parent_model : typing.Optional[int] - - model_provider_connection : typing.Optional[int] - - prompt : typing.Optional[str] - - provider : typing.Optional[PromptVersionProvider] - - provider_model_id : typing.Optional[str] - - created_by : typing.Optional[PromptVersionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - organization : typing.Optional[PromptVersionOrganization] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[PromptVersion] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="PATCH", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def cost_estimate( - self, - prompt_id: int, - version_id: int, - *, - project_id: int, - project_subset: int, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[InferenceRunCostEstimate]: - """ - Get cost estimate for running a prompt version on a particular project/subset - - Parameters - ---------- - prompt_id : int - Prompt ID - - version_id : int - Prompt Version ID - - project_id : int - ID of the project to get an estimate for running on - - project_subset : int - Subset of the project to get an estimate for running on (e.g. 
'All', 'Sample', or 'HasGT') - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[InferenceRunCostEstimate] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", - method="POST", - params={ - "project_id": project_id, - "project_subset": project_subset, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - InferenceRunCostEstimate, - parse_obj_as( - type_=InferenceRunCostEstimate, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get_refined_prompt( - self, - prompt_id: int, - version_id: int, - *, - refinement_job_id: str, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[RefinedPromptResponse]: - """ - Get the refined prompt based on the `refinement_job_id`. - - Parameters - ---------- - prompt_id : int - Prompt ID - - version_id : int - Prompt Version ID - - refinement_job_id : str - Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[RefinedPromptResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="GET", - params={ - "refinement_job_id": refinement_job_id, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def refine_prompt( - self, - prompt_id: int, - version_id: int, - *, - async_: typing.Optional[bool] = None, - teacher_model_provider_connection_id: typing.Optional[int] = OMIT, - teacher_model_name: typing.Optional[str] = OMIT, - project_id: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[RefinedPromptResponse]: - """ - Refine a prompt version using a teacher model and save the refined prompt as a new version. - - Parameters - ---------- - prompt_id : int - Prompt ID - - version_id : int - Base Prompt Version ID - - async_ : typing.Optional[bool] - Run the refinement job asynchronously - - teacher_model_provider_connection_id : typing.Optional[int] - Model Provider Connection ID to use to refine the prompt - - teacher_model_name : typing.Optional[str] - Name of the model to use to refine the prompt - - project_id : typing.Optional[int] - Project ID to target the refined prompt for - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[RefinedPromptResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="POST", - params={ - "async": async_, - }, - json={ - "teacher_model_provider_connection_id": teacher_model_provider_connection_id, - "teacher_model_name": teacher_model_name, - "project_id": project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawVersionsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[PromptVersion]]: - """ - Get a list of prompt versions. - - Parameters - ---------- - id : int - Prompt ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[PromptVersion]] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[PromptVersion], - parse_obj_as( - type_=typing.List[PromptVersion], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - parent_model: typing.Optional[int] = OMIT, - model_provider_connection: typing.Optional[int] = OMIT, - prompt: typing.Optional[str] = OMIT, - provider: typing.Optional[PromptVersionProvider] = OMIT, - provider_model_id: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptVersionOrganization] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[PromptVersion]: - """ - Create a new version of a prompt. 
- - Parameters - ---------- - id : int - Prompt ID - - title : typing.Optional[str] - - parent_model : typing.Optional[int] - - model_provider_connection : typing.Optional[int] - - prompt : typing.Optional[str] - - provider : typing.Optional[PromptVersionProvider] - - provider_model_id : typing.Optional[str] - - created_by : typing.Optional[PromptVersionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - organization : typing.Optional[PromptVersionOrganization] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[PromptVersion] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="POST", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: 
int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[PromptVersion]: - """ - Get a prompt version by ID. - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[PromptVersion] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Delete a prompt version by ID. - - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - version_id: int, - *, - title: typing.Optional[str] = OMIT, - parent_model: typing.Optional[int] = OMIT, - model_provider_connection: typing.Optional[int] = OMIT, - prompt: typing.Optional[str] = OMIT, - provider: typing.Optional[PromptVersionProvider] = OMIT, - provider_model_id: typing.Optional[str] = OMIT, - created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - organization: typing.Optional[PromptVersionOrganization] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[PromptVersion]: - """ - Update a prompt version by ID. 
- - Parameters - ---------- - id : int - Prompt ID - - version_id : int - Prompt Version ID - - title : typing.Optional[str] - - parent_model : typing.Optional[int] - - model_provider_connection : typing.Optional[int] - - prompt : typing.Optional[str] - - provider : typing.Optional[PromptVersionProvider] - - provider_model_id : typing.Optional[str] - - created_by : typing.Optional[PromptVersionCreatedBy] - - created_at : typing.Optional[dt.datetime] - - updated_at : typing.Optional[dt.datetime] - - organization : typing.Optional[PromptVersionOrganization] - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[PromptVersion] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="PATCH", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptVersionCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, annotation=PromptVersionOrganization, direction="write" - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) - - async def cost_estimate( - self, - prompt_id: int, - version_id: int, - *, - project_id: int, - project_subset: int, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[InferenceRunCostEstimate]: - """ - Get cost estimate for running a prompt version on a particular project/subset - - Parameters - ---------- - prompt_id : int - Prompt ID - - version_id : int - Prompt Version ID - - project_id : int - ID of the project to get an estimate for running on - - project_subset : int - Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT') - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[InferenceRunCostEstimate] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", - method="POST", - params={ - "project_id": project_id, - "project_subset": project_subset, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - InferenceRunCostEstimate, - parse_obj_as( - type_=InferenceRunCostEstimate, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get_refined_prompt( - self, - prompt_id: int, - version_id: int, - *, - refinement_job_id: str, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[RefinedPromptResponse]: - """ - Get the refined prompt based on the `refinement_job_id`. 
- - Parameters - ---------- - prompt_id : int - Prompt ID - - version_id : int - Prompt Version ID - - refinement_job_id : str - Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RefinedPromptResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="GET", - params={ - "refinement_job_id": refinement_job_id, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def refine_prompt( - self, - prompt_id: int, - version_id: int, - *, - async_: typing.Optional[bool] = None, - teacher_model_provider_connection_id: typing.Optional[int] = OMIT, - teacher_model_name: typing.Optional[str] = OMIT, - project_id: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[RefinedPromptResponse]: - """ - Refine a prompt version using a teacher model and save the refined prompt as a new version. 
- - Parameters - ---------- - prompt_id : int - Prompt ID - - version_id : int - Base Prompt Version ID - - async_ : typing.Optional[bool] - Run the refinement job asynchronously - - teacher_model_provider_connection_id : typing.Optional[int] - Model Provider Connection ID to use to refine the prompt - - teacher_model_name : typing.Optional[str] - Name of the model to use to refine the prompt - - project_id : typing.Optional[int] - Project ID to target the refined prompt for - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[RefinedPromptResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="POST", - params={ - "async": async_, - }, - json={ - "teacher_model_provider_connection_id": teacher_model_provider_connection_id, - "teacher_model_name": teacher_model_name, - "project_id": project_id, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/tasks/__init__.py b/src/label_studio_sdk/tasks/__init__.py index f5f953fad..06c13c543 100644 --- a/src/label_studio_sdk/tasks/__init__.py +++ b/src/label_studio_sdk/tasks/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import TasksListRequestFields, TasksListResponse __all__ = ["TasksListRequestFields", "TasksListResponse"] diff --git a/src/label_studio_sdk/tasks/client.py b/src/label_studio_sdk/tasks/client.py index 1152d0300..797658b3e 100644 --- a/src/label_studio_sdk/tasks/client.py +++ b/src/label_studio_sdk/tasks/client.py @@ -1,16 +1,21 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.pagination import AsyncPager, SyncPager +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions -from ..types.base_task import BaseTask -from ..types.data_manager_task_serializer import DataManagerTaskSerializer from ..types.project_import import ProjectImport -from ..types.task import Task -from .raw_client import AsyncRawTasksClient, RawTasksClient +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.tasks_list_request_fields import TasksListRequestFields +from ..core.pagination import SyncPager +from ..types.task import Task +from .types.tasks_list_response import TasksListResponse +from ..types.base_task import BaseTask +from ..types.data_manager_task_serializer import DataManagerTaskSerializer +from ..core.client_wrapper import AsyncClientWrapper +from ..core.pagination import AsyncPager # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,21 +23,14 @@ class TasksClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawTasksClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawTasksClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawTasksClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def create_many_status( - self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + import_pk: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> ProjectImport: """ @@ -71,8 +69,24 @@ def create_many_status( import_pk="import_pk", ) """ - _response = self._raw_client.create_many_status(id, import_pk, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -104,8 +118,18 @@ def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestO id=1, ) """ - _response = self._raw_client.delete_all_tasks(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def list( self, @@ -185,18 +209,51 @@ def list( for page in response.iter_pages(): yield page """ - return 
self._raw_client.list( - page=page, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, + page = page if page is not None else 1 + _response = self._client_wrapper.httpx_client.request( + "api/tasks/", + method="GET", + params={ + "page": page, + "page_size": page_size, + "view": view, + "project": project, + "resolve_uri": resolve_uri, + "fields": fields, + "review": review, + "include": include, + "query": query, + }, request_options=request_options, ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) + _has_next = True + _get_next = lambda: self.list( + page=page + 1, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + _items = _parsed_response.tasks + return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -241,8 +298,32 @@ def create( project=1, ) """ - _response = self._raw_client.create(data=data, project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/tasks/", + method="POST", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + 
raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DataManagerTaskSerializer: """ @@ -274,8 +355,24 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non id="id", ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -309,8 +406,18 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = id="id", ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -358,27 +465,44 @@ def update( project=1, ) """ - _response = self._raw_client.update(id, data=data, project=project, request_options=request_options) - return _response.data + _response = 
self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncTasksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawTasksClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawTasksClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - AsyncRawTasksClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def create_many_status( - self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + import_pk: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> ProjectImport: """ @@ -425,8 +549,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create_many_status(id, import_pk, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -466,8 +606,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete_all_tasks(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def list( self, @@ -548,7 +698,6 @@ async def main() -> None: response = await client.tasks.list() async for item in response: yield item - # alternatively, you can paginate page-by-page async for page in response.iter_pages(): yield page @@ -556,18 +705,51 @@ async def main() -> None: asyncio.run(main()) """ - return await self._raw_client.list( - page=page, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, + page = page if page is not None else 1 + _response = await self._client_wrapper.httpx_client.request( + "api/tasks/", + method="GET", + params={ + "page": page, + "page_size": page_size, + "view": view, + "project": project, + "resolve_uri": resolve_uri, + "fields": fields, + "review": review, + "include": include, + "query": query, + }, request_options=request_options, ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) + _has_next = True + _get_next = lambda: self.list( + page=page + 1, + 
page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + _items = _parsed_response.tasks + return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -623,8 +805,32 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create(data=data, project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/tasks/", + method="POST", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -666,8 +872,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) + _response_json = 
_response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -709,8 +931,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -769,5 +1001,29 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update(id, data=data, project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/tasks/raw_client.py b/src/label_studio_sdk/tasks/raw_client.py deleted file mode 100644 index 152ab9860..000000000 --- a/src/label_studio_sdk/tasks/raw_client.py +++ /dev/null @@ 
-1,816 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pagination import AsyncPager, BaseHttpResponse, SyncPager -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.base_task import BaseTask -from ..types.data_manager_task_serializer import DataManagerTaskSerializer -from ..types.project_import import ProjectImport -from ..types.task import Task -from .types.tasks_list_request_fields import TasksListRequestFields -from .types.tasks_list_response import TasksListResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawTasksClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def create_many_status( - self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[ProjectImport]: - """ - - Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. - - You will need the project ID and the unique ID of the import operation. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - The import ID is returned as part of the response when you call [Import tasks](import-tasks). - - Parameters - ---------- - id : int - The project ID. - - import_pk : str - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[ProjectImport] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectImport, - parse_obj_as( - type_=ProjectImport, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete_all_tasks( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[None]: - """ - - Delete all tasks from a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def list( - self, - *, - page: typing.Optional[int] = None, - page_size: typing.Optional[int] = None, - view: typing.Optional[int] = None, - project: typing.Optional[int] = None, - resolve_uri: typing.Optional[bool] = None, - fields: typing.Optional[TasksListRequestFields] = None, - review: typing.Optional[bool] = None, - include: typing.Optional[str] = None, - query: typing.Optional[str] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> SyncPager[Task]: - """ - - Retrieve a list of tasks. - - You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). - - Parameters - ---------- - page : typing.Optional[int] - A page number within the paginated result set. - - page_size : typing.Optional[int] - Number of results to return per page. 
- - view : typing.Optional[int] - View ID - - project : typing.Optional[int] - Project ID - - resolve_uri : typing.Optional[bool] - Resolve task data URIs using Cloud Storage - - fields : typing.Optional[TasksListRequestFields] - Set to "all" if you want to include annotations and predictions in the response - - review : typing.Optional[bool] - Get tasks for review - - include : typing.Optional[str] - Specify which fields to include in the response - - query : typing.Optional[str] - Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. - - * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` - * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` - * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
- Example: `["completed_at"]` - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - SyncPager[Task] - List of Tasks - """ - page = page if page is not None else 1 - - _response = self._client_wrapper.httpx_client.request( - "api/tasks/", - method="GET", - params={ - "page": page, - "page_size": page_size, - "view": view, - "project": project, - "resolve_uri": resolve_uri, - "fields": fields, - "review": review, - "include": include, - "query": query, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - TasksListResponse, - parse_obj_as( - type_=TasksListResponse, # type: ignore - object_=_response.json(), - ), - ) - _items = _parsed_response.tasks - _has_next = True - _get_next = lambda: self.list( - page=page + 1, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - return SyncPager( - has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[BaseTask]: - """ - - Create a new labeling task in Label Studio. - - The data you provide depends on your labeling config and data type. - - You will also need to provide a project ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - Parameters - ---------- - data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Task data dictionary with arbitrary keys and values - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[BaseTask] - Created task - """ - _response = self._client_wrapper.httpx_client.request( - "api/tasks/", - method="POST", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, id: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[DataManagerTaskSerializer]: - """ - - Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. - The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). - - Parameters - ---------- - id : str - Task ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[DataManagerTaskSerializer] - Task - """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - DataManagerTaskSerializer, - parse_obj_as( - type_=DataManagerTaskSerializer, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a task in Label Studio. - - You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). - - This action cannot be undone. - - Parameters - ---------- - id : str - Task ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: str, - *, - data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[BaseTask]: - """ - - Update the attributes of an existing labeling task. - - You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). - - Parameters - ---------- - id : str - Task ID - - data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Task data dictionary with arbitrary keys and values - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[BaseTask] - Updated task - """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawTasksClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def create_many_status( - self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[ProjectImport]: - """ - - Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. - - You will need the project ID and the unique ID of the import operation. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - The import ID is returned as part of the response when you call [Import tasks](import-tasks). - - Parameters - ---------- - id : int - The project ID. - - import_pk : str - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ProjectImport] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ProjectImport, - parse_obj_as( - type_=ProjectImport, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete_all_tasks( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete all tasks from a specific project. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - Parameters - ---------- - id : int - A unique integer value identifying this project. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def list( - self, - *, - page: typing.Optional[int] = None, - page_size: typing.Optional[int] = None, - view: typing.Optional[int] = None, - project: typing.Optional[int] = None, - resolve_uri: typing.Optional[bool] = None, - fields: typing.Optional[TasksListRequestFields] = None, - review: typing.Optional[bool] = None, - include: typing.Optional[str] = None, - query: typing.Optional[str] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncPager[Task]: - """ - - Retrieve a list of tasks. - - You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. - - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). - - Parameters - ---------- - page : typing.Optional[int] - A page number within the paginated result set. - - page_size : typing.Optional[int] - Number of results to return per page. 
- - view : typing.Optional[int] - View ID - - project : typing.Optional[int] - Project ID - - resolve_uri : typing.Optional[bool] - Resolve task data URIs using Cloud Storage - - fields : typing.Optional[TasksListRequestFields] - Set to "all" if you want to include annotations and predictions in the response - - review : typing.Optional[bool] - Get tasks for review - - include : typing.Optional[str] - Specify which fields to include in the response - - query : typing.Optional[str] - Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. - - * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` - * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` - * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
- Example: `["completed_at"]` - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncPager[Task] - List of Tasks - """ - page = page if page is not None else 1 - - _response = await self._client_wrapper.httpx_client.request( - "api/tasks/", - method="GET", - params={ - "page": page, - "page_size": page_size, - "view": view, - "project": project, - "resolve_uri": resolve_uri, - "fields": fields, - "review": review, - "include": include, - "query": query, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - TasksListResponse, - parse_obj_as( - type_=TasksListResponse, # type: ignore - object_=_response.json(), - ), - ) - _items = _parsed_response.tasks - _has_next = True - - async def _get_next(): - return await self.list( - page=page + 1, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - - return AsyncPager( - has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[BaseTask]: - """ - - Create a new labeling task in Label Studio. - - The data you provide depends on your labeling config and data type. - - You will also need to provide a project ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - - Parameters - ---------- - data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Task data dictionary with arbitrary keys and values - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[BaseTask] - Created task - """ - _response = await self._client_wrapper.httpx_client.request( - "api/tasks/", - method="POST", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[DataManagerTaskSerializer]: - """ - - Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. - The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). - - Parameters - ---------- - id : str - Task ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[DataManagerTaskSerializer] - Task - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - DataManagerTaskSerializer, - parse_obj_as( - type_=DataManagerTaskSerializer, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a task in Label Studio. - - You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). - - This action cannot be undone. - - Parameters - ---------- - id : str - Task ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: str, - *, - data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[BaseTask]: - """ - - Update the attributes of an existing labeling task. - - You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). - - Parameters - ---------- - id : str - Task ID - - data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Task data dictionary with arbitrary keys and values - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[BaseTask] - Updated task - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/tasks/types/__init__.py b/src/label_studio_sdk/tasks/types/__init__.py index 436b4e548..bba1de8a3 100644 --- a/src/label_studio_sdk/tasks/types/__init__.py +++ b/src/label_studio_sdk/tasks/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .tasks_list_request_fields import TasksListRequestFields from .tasks_list_response import TasksListResponse diff --git a/src/label_studio_sdk/tasks/types/tasks_list_response.py b/src/label_studio_sdk/tasks/types/tasks_list_response.py index 00d717374..c8d9e0240 100644 --- a/src/label_studio_sdk/tasks/types/tasks_list_response.py +++ b/src/label_studio_sdk/tasks/types/tasks_list_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...types.task import Task +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class TasksListResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/tokens/__init__.py b/src/label_studio_sdk/tokens/__init__.py index 5cde0202d..f3ea2659b 100644 --- a/src/label_studio_sdk/tokens/__init__.py +++ b/src/label_studio_sdk/tokens/__init__.py @@ -1,4 +1,2 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - diff --git a/src/label_studio_sdk/tokens/client.py b/src/label_studio_sdk/tokens/client.py index 0191ade07..4b0308939 100644 --- a/src/label_studio_sdk/tokens/client.py +++ b/src/label_studio_sdk/tokens/client.py @@ -1,13 +1,18 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions -from ..types.access_token_response import AccessTokenResponse +from ..errors.not_found_error import NotFoundError +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from ..types.api_token_response import ApiTokenResponse +from ..types.access_token_response import AccessTokenResponse +from ..errors.unauthorized_error import UnauthorizedError from ..types.rotate_token_response import RotateTokenResponse -from .raw_client import AsyncRawTokensClient, RawTokensClient +from ..errors.bad_request_error import BadRequestError +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -15,18 +20,7 @@ class TokensClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawTokensClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawTokensClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawTokensClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -55,8 +49,35 @@ def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOpt refresh="refresh", ) """ - _response = self._raw_client.blacklist(refresh=refresh, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/token/blacklist", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ApiTokenResponse]: """ @@ -81,8 +102,24 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typ ) client.tokens.get() """ - _response = self._raw_client.get(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + 
return typing.cast( + typing.List[ApiTokenResponse], + parse_obj_as( + type_=typing.List[ApiTokenResponse], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> ApiTokenResponse: """ @@ -107,8 +144,24 @@ def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> ) client.tokens.create() """ - _response = self._raw_client.create(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/token", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ApiTokenResponse, + parse_obj_as( + type_=ApiTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def refresh(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> AccessTokenResponse: """ @@ -138,8 +191,41 @@ def refresh(self, *, refresh: str, request_options: typing.Optional[RequestOptio refresh="refresh", ) """ - _response = self._raw_client.refresh(refresh=refresh, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/token/refresh", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AccessTokenResponse, + parse_obj_as( + type_=AccessTokenResponse, # type: ignore + object_=_response.json(), + 
), + ) + if _response.status_code == 401: + raise UnauthorizedError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def rotate(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> RotateTokenResponse: """ @@ -169,24 +255,46 @@ def rotate(self, *, refresh: str, request_options: typing.Optional[RequestOption refresh="refresh", ) """ - _response = self._raw_client.rotate(refresh=refresh, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/token/rotate", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RotateTokenResponse, + parse_obj_as( + type_=RotateTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncTokensClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawTokensClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawTokensClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawTokensClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -223,8 +331,35 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.blacklist(refresh=refresh, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/token/blacklist", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + if _response.status_code == 404: + raise NotFoundError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ApiTokenResponse]: """ @@ -257,8 +392,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[ApiTokenResponse], + parse_obj_as( + type_=typing.List[ApiTokenResponse], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def 
create(self, *, request_options: typing.Optional[RequestOptions] = None) -> ApiTokenResponse: """ @@ -291,8 +442,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/token", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ApiTokenResponse, + parse_obj_as( + type_=ApiTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def refresh( self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None @@ -332,8 +499,41 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.refresh(refresh=refresh, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/token/refresh", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AccessTokenResponse, + parse_obj_as( + type_=AccessTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 401: + raise UnauthorizedError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def rotate( self, *, refresh: str, 
request_options: typing.Optional[RequestOptions] = None @@ -373,5 +573,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.rotate(refresh=refresh, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/token/rotate", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + RotateTokenResponse, + parse_obj_as( + type_=RotateTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 400: + raise BadRequestError( + typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/tokens/raw_client.py b/src/label_studio_sdk/tokens/raw_client.py deleted file mode 100644 index a68f07754..000000000 --- a/src/label_studio_sdk/tokens/raw_client.py +++ /dev/null @@ -1,495 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..errors.bad_request_error import BadRequestError -from ..errors.not_found_error import NotFoundError -from ..errors.unauthorized_error import UnauthorizedError -from ..types.access_token_response import AccessTokenResponse -from ..types.api_token_response import ApiTokenResponse -from ..types.rotate_token_response import RotateTokenResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawTokensClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - Blacklist a refresh token to prevent its future use. - - Parameters - ---------- - refresh : str - JWT refresh token - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/token/blacklist", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[ApiTokenResponse]]: - """ - List all API tokens for the current user. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[ApiTokenResponse]] - List of API tokens retrieved successfully - """ - _response = self._client_wrapper.httpx_client.request( - "api/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ApiTokenResponse], - parse_obj_as( - type_=typing.List[ApiTokenResponse], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[ApiTokenResponse]: - """ - Create a new API token for the current user. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[ApiTokenResponse] - Token created successfully - """ - _response = self._client_wrapper.httpx_client.request( - "api/token", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ApiTokenResponse, - parse_obj_as( - type_=ApiTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def refresh( - self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[AccessTokenResponse]: - """ - Get a new access token, using a refresh token. 
- - Parameters - ---------- - refresh : str - JWT refresh token - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[AccessTokenResponse] - New access token created successfully - """ - _response = self._client_wrapper.httpx_client.request( - "api/token/refresh", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AccessTokenResponse, - parse_obj_as( - type_=AccessTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - if _response.status_code == 401: - raise UnauthorizedError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def rotate( - self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[RotateTokenResponse]: - """ - Blacklist existing refresh token, and get a new refresh token. - - Parameters - ---------- - refresh : str - JWT refresh token - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[RotateTokenResponse] - Refresh token successfully rotated - """ - _response = self._client_wrapper.httpx_client.request( - "api/token/rotate", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RotateTokenResponse, - parse_obj_as( - type_=RotateTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - if _response.status_code == 400: - raise BadRequestError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawTokensClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def blacklist( - self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Blacklist a refresh token to prevent its future use. - - Parameters - ---------- - refresh : str - JWT refresh token - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/blacklist", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - if _response.status_code == 404: - raise NotFoundError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[ApiTokenResponse]]: - """ - List all API tokens for the current user. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[ApiTokenResponse]] - List of API tokens retrieved successfully - """ - _response = await self._client_wrapper.httpx_client.request( - "api/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[ApiTokenResponse], - parse_obj_as( - type_=typing.List[ApiTokenResponse], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[ApiTokenResponse]: - """ - Create a new API token for the current user. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[ApiTokenResponse] - Token created successfully - """ - _response = await self._client_wrapper.httpx_client.request( - "api/token", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - ApiTokenResponse, - parse_obj_as( - type_=ApiTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def refresh( - self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[AccessTokenResponse]: - """ - Get a new access token, using a refresh token. - - Parameters - ---------- - refresh : str - JWT refresh token - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[AccessTokenResponse] - New access token created successfully - """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/refresh", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - AccessTokenResponse, - parse_obj_as( - type_=AccessTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - if _response.status_code == 401: - raise UnauthorizedError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def rotate( - self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[RotateTokenResponse]: - """ - Blacklist existing refresh token, and get a new refresh token. - - Parameters - ---------- - refresh : str - JWT refresh token - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[RotateTokenResponse] - Refresh token successfully rotated - """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/rotate", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - RotateTokenResponse, - parse_obj_as( - type_=RotateTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - if _response.status_code == 400: - raise BadRequestError( - headers=dict(_response.headers), - body=typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/types/__init__.py b/src/label_studio_sdk/types/__init__.py index 88988e3a4..6fe7e2249 100644 --- a/src/label_studio_sdk/types/__init__.py +++ b/src/label_studio_sdk/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .access_token_response import AccessTokenResponse from .annotation import Annotation from .annotation_completed_by import AnnotationCompletedBy diff --git a/src/label_studio_sdk/types/access_token_response.py b/src/label_studio_sdk/types/access_token_response.py index 57df75713..3c55d97da 100644 --- a/src/label_studio_sdk/types/access_token_response.py +++ b/src/label_studio_sdk/types/access_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing - +from ..core.pydantic_utilities import UniversalBaseModel import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class AccessTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/annotation.py b/src/label_studio_sdk/types/annotation.py index e27a951a7..9463e39b2 100644 --- a/src/label_studio_sdk/types/annotation.py +++ b/src/label_studio_sdk/types/annotation.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotation_completed_by import AnnotationCompletedBy +import datetime as dt from .annotation_last_action import AnnotationLastAction +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Annotation(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/annotation_completed_by.py b/src/label_studio_sdk/types/annotation_completed_by.py index f3f4a635e..b9e7caf52 100644 --- a/src/label_studio_sdk/types/annotation_completed_by.py +++ b/src/label_studio_sdk/types/annotation_completed_by.py @@ -1,7 +1,6 @@ # This file was auto-generated by Fern from our API Definition. import typing - from .user_simple import UserSimple AnnotationCompletedBy = typing.Union[UserSimple, int] diff --git a/src/label_studio_sdk/types/annotation_filter_options.py b/src/label_studio_sdk/types/annotation_filter_options.py index 3e3f86ed4..3f00e64ba 100644 --- a/src/label_studio_sdk/types/annotation_filter_options.py +++ b/src/label_studio_sdk/types/annotation_filter_options.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AnnotationFilterOptions(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/annotations_dm_field.py b/src/label_studio_sdk/types/annotations_dm_field.py index 0aa2976f4..114de210d 100644 --- a/src/label_studio_sdk/types/annotations_dm_field.py +++ b/src/label_studio_sdk/types/annotations_dm_field.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .annotations_dm_field_last_action import AnnotationsDmFieldLastAction +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AnnotationsDmField(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/api_token_response.py b/src/label_studio_sdk/types/api_token_response.py index 5f7ab74bd..72d4fddd3 100644 --- a/src/label_studio_sdk/types/api_token_response.py +++ b/src/label_studio_sdk/types/api_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import typing - +from ..core.pydantic_utilities import UniversalBaseModel import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class ApiTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/azure_blob_export_storage.py b/src/label_studio_sdk/types/azure_blob_export_storage.py index 20500cf84..83394ea4d 100644 --- a/src/label_studio_sdk/types/azure_blob_export_storage.py +++ b/src/label_studio_sdk/types/azure_blob_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .azure_blob_export_storage_status import AzureBlobExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AzureBlobExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/azure_blob_export_storage_status.py b/src/label_studio_sdk/types/azure_blob_export_storage_status.py index d63998f7b..0b25961af 100644 --- a/src/label_studio_sdk/types/azure_blob_export_storage_status.py +++ b/src/label_studio_sdk/types/azure_blob_export_storage_status.py @@ -3,5 +3,6 @@ import typing AzureBlobExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/azure_blob_import_storage.py b/src/label_studio_sdk/types/azure_blob_import_storage.py index c5f3648b0..3de9b873b 100644 --- a/src/label_studio_sdk/types/azure_blob_import_storage.py +++ b/src/label_studio_sdk/types/azure_blob_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .azure_blob_import_storage_status import AzureBlobImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AzureBlobImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/azure_blob_import_storage_status.py b/src/label_studio_sdk/types/azure_blob_import_storage_status.py index bb2b31b26..05c38a48b 100644 --- a/src/label_studio_sdk/types/azure_blob_import_storage_status.py +++ b/src/label_studio_sdk/types/azure_blob_import_storage_status.py @@ -3,5 +3,6 @@ import typing AzureBlobImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/base_task.py b/src/label_studio_sdk/types/base_task.py index a06228dd3..3e8251862 100644 --- a/src/label_studio_sdk/types/base_task.py +++ b/src/label_studio_sdk/types/base_task.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .base_task_file_upload import BaseTaskFileUpload +import datetime as dt from .base_task_updated_by import BaseTaskUpdatedBy +from .base_task_file_upload import BaseTaskFileUpload +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class BaseTask(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/base_task_updated_by.py b/src/label_studio_sdk/types/base_task_updated_by.py index 4a9de7471..1849b31d9 100644 --- a/src/label_studio_sdk/types/base_task_updated_by.py +++ b/src/label_studio_sdk/types/base_task_updated_by.py @@ -3,5 +3,6 @@ import typing BaseTaskUpdatedBy = typing.Union[ - typing.Optional[int], typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] + typing.Optional[int], + typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]], ] diff --git a/src/label_studio_sdk/types/base_user.py b/src/label_studio_sdk/types/base_user.py index ef14b7fa2..6edb03619 100644 --- a/src/label_studio_sdk/types/base_user.py +++ b/src/label_studio_sdk/types/base_user.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +import datetime as dt import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class BaseUser(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/comment.py b/src/label_studio_sdk/types/comment.py index f52785a71..5f48c133b 100644 --- a/src/label_studio_sdk/types/comment.py +++ b/src/label_studio_sdk/types/comment.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel +from .comment_created_by import CommentCreatedBy import datetime as dt import typing - +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .comment_created_by import CommentCreatedBy class Comment(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/converted_format.py b/src/label_studio_sdk/types/converted_format.py index 70b6583de..bc0bf56aa 100644 --- a/src/label_studio_sdk/types/converted_format.py +++ b/src/label_studio_sdk/types/converted_format.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .converted_format_status import ConvertedFormatStatus +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class ConvertedFormat(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer.py b/src/label_studio_sdk/types/data_manager_task_serializer.py index e9dcf9447..a129aa5c6 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer.py @@ -1,14 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +from .data_manager_task_serializer_predictions_item import ( + DataManagerTaskSerializerPredictionsItem, +) import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotations_dm_field import AnnotationsDmField -from .data_manager_task_serializer_annotators_item import DataManagerTaskSerializerAnnotatorsItem -from .data_manager_task_serializer_drafts_item import DataManagerTaskSerializerDraftsItem -from .data_manager_task_serializer_predictions_item import DataManagerTaskSerializerPredictionsItem +from .data_manager_task_serializer_drafts_item import ( + DataManagerTaskSerializerDraftsItem, +) +from .data_manager_task_serializer_annotators_item import ( + DataManagerTaskSerializerAnnotatorsItem, +) +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class DataManagerTaskSerializer(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py index 8d334b7b4..792c89c46 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class DataManagerTaskSerializerDraftsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py index 0336ae0bd..8b01227e4 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class DataManagerTaskSerializerPredictionsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/export.py b/src/label_studio_sdk/types/export.py index c1c80f164..1d7f45038 100644 --- a/src/label_studio_sdk/types/export.py +++ b/src/label_studio_sdk/types/export.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +from .user_simple import UserSimple +import datetime as dt import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .converted_format import ConvertedFormat from .export_status import ExportStatus -from .user_simple import UserSimple +from .converted_format import ConvertedFormat +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Export(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/export_snapshot.py b/src/label_studio_sdk/types/export_snapshot.py index afc7920e6..4f23e9996 100644 --- a/src/label_studio_sdk/types/export_snapshot.py +++ b/src/label_studio_sdk/types/export_snapshot.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +from .user_simple import UserSimple +import datetime as dt import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .annotation_filter_options import AnnotationFilterOptions -from .converted_format import ConvertedFormat from .export_snapshot_status import ExportSnapshotStatus -from .serialization_options import SerializationOptions +from .converted_format import ConvertedFormat from .task_filter_options import TaskFilterOptions -from .user_simple import UserSimple +from .annotation_filter_options import AnnotationFilterOptions +from .serialization_options import SerializationOptions +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class ExportSnapshot(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/file_upload.py b/src/label_studio_sdk/types/file_upload.py index c4320edaf..8fcd31f62 100644 --- a/src/label_studio_sdk/types/file_upload.py +++ b/src/label_studio_sdk/types/file_upload.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class FileUpload(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/filter.py b/src/label_studio_sdk/types/filter.py index c301db612..c5e37fa4d 100644 --- a/src/label_studio_sdk/types/filter.py +++ b/src/label_studio_sdk/types/filter.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Filter(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/filter_group.py b/src/label_studio_sdk/types/filter_group.py index 7dc87b712..626b8a439 100644 --- a/src/label_studio_sdk/types/filter_group.py +++ b/src/label_studio_sdk/types/filter_group.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .filter import Filter +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class FilterGroup(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_export_storage.py b/src/label_studio_sdk/types/gcs_export_storage.py index df5cd7189..399102266 100644 --- a/src/label_studio_sdk/types/gcs_export_storage.py +++ b/src/label_studio_sdk/types/gcs_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .gcs_export_storage_status import GcsExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class GcsExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_export_storage_status.py b/src/label_studio_sdk/types/gcs_export_storage_status.py index 9284fa5b0..64534c344 100644 --- a/src/label_studio_sdk/types/gcs_export_storage_status.py +++ b/src/label_studio_sdk/types/gcs_export_storage_status.py @@ -3,5 +3,6 @@ import typing GcsExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/gcs_import_storage.py b/src/label_studio_sdk/types/gcs_import_storage.py index 269b48fd0..ee406e985 100644 --- a/src/label_studio_sdk/types/gcs_import_storage.py +++ b/src/label_studio_sdk/types/gcs_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .gcs_import_storage_status import GcsImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class GcsImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_import_storage_status.py b/src/label_studio_sdk/types/gcs_import_storage_status.py index 0c503c5e3..44d6fa825 100644 --- a/src/label_studio_sdk/types/gcs_import_storage_status.py +++ b/src/label_studio_sdk/types/gcs_import_storage_status.py @@ -3,5 +3,6 @@ import typing GcsImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/inference_run.py b/src/label_studio_sdk/types/inference_run.py index 6a1e5d6f5..b6837d9cc 100644 --- a/src/label_studio_sdk/types/inference_run.py +++ b/src/label_studio_sdk/types/inference_run.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .inference_run_created_by import InferenceRunCreatedBy from .inference_run_organization import InferenceRunOrganization +from .inference_run_created_by import InferenceRunCreatedBy from .inference_run_project_subset import InferenceRunProjectSubset from .inference_run_status import InferenceRunStatus +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic class InferenceRun(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/inference_run_cost_estimate.py b/src/label_studio_sdk/types/inference_run_cost_estimate.py index 531292f47..103f975c3 100644 --- a/src/label_studio_sdk/types/inference_run_cost_estimate.py +++ b/src/label_studio_sdk/types/inference_run_cost_estimate.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class InferenceRunCostEstimate(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/inference_run_status.py b/src/label_studio_sdk/types/inference_run_status.py index b832b23ad..b061f5320 100644 --- a/src/label_studio_sdk/types/inference_run_status.py +++ b/src/label_studio_sdk/types/inference_run_status.py @@ -3,5 +3,6 @@ import typing InferenceRunStatus = typing.Union[ - typing.Literal["Pending", "InProgress", "Completed", "Failed", "Canceled"], typing.Any + typing.Literal["Pending", "InProgress", "Completed", "Failed", "Canceled"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/jwt_settings_response.py b/src/label_studio_sdk/types/jwt_settings_response.py index 99e431d8f..a2c1fb95a 100644 --- a/src/label_studio_sdk/types/jwt_settings_response.py +++ b/src/label_studio_sdk/types/jwt_settings_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import typing - +from ..core.pydantic_utilities import UniversalBaseModel import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class JwtSettingsResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicator_value.py b/src/label_studio_sdk/types/key_indicator_value.py index 116d4d2f6..291358e18 100644 --- a/src/label_studio_sdk/types/key_indicator_value.py +++ b/src/label_studio_sdk/types/key_indicator_value.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class KeyIndicatorValue(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators.py b/src/label_studio_sdk/types/key_indicators.py index d03c3cd18..23e5b71de 100644 --- a/src/label_studio_sdk/types/key_indicators.py +++ b/src/label_studio_sdk/types/key_indicators.py @@ -1,7 +1,6 @@ # This file was auto-generated by Fern from our API Definition. import typing - from .key_indicators_item import KeyIndicatorsItem KeyIndicators = typing.List[KeyIndicatorsItem] diff --git a/src/label_studio_sdk/types/key_indicators_item.py b/src/label_studio_sdk/types/key_indicators_item.py index bc5dc63a6..eee9df6e8 100644 --- a/src/label_studio_sdk/types/key_indicators_item.py +++ b/src/label_studio_sdk/types/key_indicators_item.py @@ -1,11 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing - +from ..core.pydantic_utilities import UniversalBaseModel import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .key_indicators_item_additional_kpis_item import KeyIndicatorsItemAdditionalKpisItem +import typing +from .key_indicators_item_additional_kpis_item import ( + KeyIndicatorsItemAdditionalKpisItem, +) from .key_indicators_item_extra_kpis_item import KeyIndicatorsItemExtraKpisItem +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class KeyIndicatorsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py index ea89a9020..a0e1b06ca 100644 --- a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class KeyIndicatorsItemAdditionalKpisItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py index fe5f23248..9e539bc1d 100644 --- a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class KeyIndicatorsItemExtraKpisItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_export_storage.py b/src/label_studio_sdk/types/local_files_export_storage.py index 596487c0d..fffaaaa84 100644 --- a/src/label_studio_sdk/types/local_files_export_storage.py +++ b/src/label_studio_sdk/types/local_files_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .local_files_export_storage_status import LocalFilesExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class LocalFilesExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_export_storage_status.py b/src/label_studio_sdk/types/local_files_export_storage_status.py index 440144230..da4b79bb8 100644 --- a/src/label_studio_sdk/types/local_files_export_storage_status.py +++ b/src/label_studio_sdk/types/local_files_export_storage_status.py @@ -3,5 +3,6 @@ import typing LocalFilesExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/local_files_import_storage.py b/src/label_studio_sdk/types/local_files_import_storage.py index 5a0d70f93..57240a844 100644 --- a/src/label_studio_sdk/types/local_files_import_storage.py +++ b/src/label_studio_sdk/types/local_files_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .local_files_import_storage_status import LocalFilesImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class LocalFilesImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_import_storage_status.py b/src/label_studio_sdk/types/local_files_import_storage_status.py index 562ec5011..d05328925 100644 --- a/src/label_studio_sdk/types/local_files_import_storage_status.py +++ b/src/label_studio_sdk/types/local_files_import_storage_status.py @@ -3,5 +3,6 @@ import typing LocalFilesImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/ml_backend.py b/src/label_studio_sdk/types/ml_backend.py index 067326c1e..21fd41e90 100644 --- a/src/label_studio_sdk/types/ml_backend.py +++ b/src/label_studio_sdk/types/ml_backend.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +from .ml_backend_state import MlBackendState import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_backend_auth_method import MlBackendAuthMethod -from .ml_backend_state import MlBackendState +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class MlBackend(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/model_provider_connection.py b/src/label_studio_sdk/types/model_provider_connection.py index 2d624ef77..4f617467c 100644 --- a/src/label_studio_sdk/types/model_provider_connection.py +++ b/src/label_studio_sdk/types/model_provider_connection.py @@ -1,15 +1,17 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt -import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod -from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy -from .model_provider_connection_organization import ModelProviderConnectionOrganization +from ..core.pydantic_utilities import UniversalBaseModel from .model_provider_connection_provider import ModelProviderConnectionProvider +import typing from .model_provider_connection_scope import ModelProviderConnectionScope +from .model_provider_connection_organization import ModelProviderConnectionOrganization +from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy +import datetime as dt +import pydantic +from .model_provider_connection_budget_reset_period import ( + ModelProviderConnectionBudgetResetPeriod, +) +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class ModelProviderConnection(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/model_provider_connection_provider.py 
b/src/label_studio_sdk/types/model_provider_connection_provider.py index fa58489fc..ce1b21a29 100644 --- a/src/label_studio_sdk/types/model_provider_connection_provider.py +++ b/src/label_studio_sdk/types/model_provider_connection_provider.py @@ -3,5 +3,14 @@ import typing ModelProviderConnectionProvider = typing.Union[ - typing.Literal["OpenAI", "AzureOpenAI", "AzureAIFoundry", "VertexAI", "Gemini", "Anthropic", "Custom"], typing.Any + typing.Literal[ + "OpenAI", + "AzureOpenAI", + "AzureAIFoundry", + "VertexAI", + "Gemini", + "Anthropic", + "Custom", + ], + typing.Any, ] diff --git a/src/label_studio_sdk/types/pause.py b/src/label_studio_sdk/types/pause.py index 74f5f7829..7c9af1c99 100644 --- a/src/label_studio_sdk/types/pause.py +++ b/src/label_studio_sdk/types/pause.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .pause_paused_by import PausePausedBy +import pydantic +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Pause(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prediction.py b/src/label_studio_sdk/types/prediction.py index 797207464..efb00f16b 100644 --- a/src/label_studio_sdk/types/prediction.py +++ b/src/label_studio_sdk/types/prediction.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Prediction(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project.py b/src/label_studio_sdk/types/project.py index d35c7ffa0..0c78fddeb 100644 --- a/src/label_studio_sdk/types/project.py +++ b/src/label_studio_sdk/types/project.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .project_sampling import ProjectSampling -from .project_skip_queue import ProjectSkipQueue from .prompt import Prompt from .user_simple import UserSimple +import datetime as dt +from .project_sampling import ProjectSampling +from .project_skip_queue import ProjectSkipQueue +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Project(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project_import.py b/src/label_studio_sdk/types/project_import.py index 9874942a7..331fd485a 100644 --- a/src/label_studio_sdk/types/project_import.py +++ b/src/label_studio_sdk/types/project_import.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .project_import_status import ProjectImportStatus +import datetime as dt +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class ProjectImport(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project_label_config.py b/src/label_studio_sdk/types/project_label_config.py index 5b21c6e3b..443fbb86e 100644 --- a/src/label_studio_sdk/types/project_label_config.py +++ b/src/label_studio_sdk/types/project_label_config.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import typing - +from ..core.pydantic_utilities import UniversalBaseModel import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class ProjectLabelConfig(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project_sampling.py b/src/label_studio_sdk/types/project_sampling.py index 0c78d6e17..27a186483 100644 --- a/src/label_studio_sdk/types/project_sampling.py +++ b/src/label_studio_sdk/types/project_sampling.py @@ -3,5 +3,6 @@ import typing ProjectSampling = typing.Union[ - typing.Literal["Sequential sampling", "Uniform sampling", "Uncertainty sampling"], typing.Any + typing.Literal["Sequential sampling", "Uniform sampling", "Uncertainty sampling"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/prompt.py b/src/label_studio_sdk/types/prompt.py index ad2173741..81c04e8c3 100644 --- a/src/label_studio_sdk/types/prompt.py +++ b/src/label_studio_sdk/types/prompt.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt -import typing - +from ..core.pydantic_utilities import UniversalBaseModel import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .prompt_associated_projects_item import PromptAssociatedProjectsItem +import typing from .prompt_created_by import PromptCreatedBy +import datetime as dt from .prompt_organization import PromptOrganization +from .prompt_associated_projects_item import PromptAssociatedProjectsItem +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Prompt(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_associated_projects_item.py b/src/label_studio_sdk/types/prompt_associated_projects_item.py index 3b1bd38e7..05ad2f37c 100644 --- a/src/label_studio_sdk/types/prompt_associated_projects_item.py +++ b/src/label_studio_sdk/types/prompt_associated_projects_item.py @@ -1,7 +1,6 @@ # This file was auto-generated by Fern from our API Definition. import typing - from .prompt_associated_projects_item_id import PromptAssociatedProjectsItemId PromptAssociatedProjectsItem = typing.Union[int, PromptAssociatedProjectsItemId] diff --git a/src/label_studio_sdk/types/prompt_associated_projects_item_id.py b/src/label_studio_sdk/types/prompt_associated_projects_item_id.py index aad441f74..01c5c53c6 100644 --- a/src/label_studio_sdk/types/prompt_associated_projects_item_id.py +++ b/src/label_studio_sdk/types/prompt_associated_projects_item_id.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptAssociatedProjectsItemId(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_version.py b/src/label_studio_sdk/types/prompt_version.py index 0058662f4..38f317b13 100644 --- a/src/label_studio_sdk/types/prompt_version.py +++ b/src/label_studio_sdk/types/prompt_version.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .prompt_version_provider import PromptVersionProvider from .prompt_version_created_by import PromptVersionCreatedBy +import datetime as dt from .prompt_version_organization import PromptVersionOrganization -from .prompt_version_provider import PromptVersionProvider +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic class PromptVersion(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_version_provider.py b/src/label_studio_sdk/types/prompt_version_provider.py index c46d1a164..efdc2d37c 100644 --- a/src/label_studio_sdk/types/prompt_version_provider.py +++ b/src/label_studio_sdk/types/prompt_version_provider.py @@ -3,5 +3,14 @@ import typing PromptVersionProvider = typing.Union[ - typing.Literal["OpenAI", "AzureOpenAI", "AzureAIFoundry", "VertexAI", "Gemini", "Anthropic", "Custom"], typing.Any + typing.Literal[ + "OpenAI", + "AzureOpenAI", + "AzureAIFoundry", + "VertexAI", + "Gemini", + "Anthropic", + "Custom", + ], + typing.Any, ] diff --git a/src/label_studio_sdk/types/redis_export_storage.py b/src/label_studio_sdk/types/redis_export_storage.py index e99a9d5c5..49d816584 100644 --- a/src/label_studio_sdk/types/redis_export_storage.py +++ 
b/src/label_studio_sdk/types/redis_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .redis_export_storage_status import RedisExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class RedisExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/redis_export_storage_status.py b/src/label_studio_sdk/types/redis_export_storage_status.py index 1ef9709ff..0f04ce717 100644 --- a/src/label_studio_sdk/types/redis_export_storage_status.py +++ b/src/label_studio_sdk/types/redis_export_storage_status.py @@ -3,5 +3,6 @@ import typing RedisExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/redis_import_storage.py b/src/label_studio_sdk/types/redis_import_storage.py index a80604fee..0790e279c 100644 --- a/src/label_studio_sdk/types/redis_import_storage.py +++ b/src/label_studio_sdk/types/redis_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt from .redis_import_storage_status import RedisImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class RedisImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/redis_import_storage_status.py b/src/label_studio_sdk/types/redis_import_storage_status.py index c8db5e91c..3d7d0cece 100644 --- a/src/label_studio_sdk/types/redis_import_storage_status.py +++ b/src/label_studio_sdk/types/redis_import_storage_status.py @@ -3,5 +3,6 @@ import typing RedisImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/refined_prompt_response.py b/src/label_studio_sdk/types/refined_prompt_response.py index 728f5ff81..7b77cb67f 100644 --- a/src/label_studio_sdk/types/refined_prompt_response.py +++ b/src/label_studio_sdk/types/refined_prompt_response.py @@ -1,11 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .refined_prompt_response_refinement_status import ( + RefinedPromptResponseRefinementStatus, +) from .prompt_version import PromptVersion -from .refined_prompt_response_refinement_status import RefinedPromptResponseRefinementStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class RefinedPromptResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/rotate_token_response.py b/src/label_studio_sdk/types/rotate_token_response.py index b71032dae..81e404eb6 100644 --- a/src/label_studio_sdk/types/rotate_token_response.py +++ b/src/label_studio_sdk/types/rotate_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -import typing - +from ..core.pydantic_utilities import UniversalBaseModel import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class RotateTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3export_storage.py b/src/label_studio_sdk/types/s3export_storage.py index a658136dd..ed4e36389 100644 --- a/src/label_studio_sdk/types/s3export_storage.py +++ b/src/label_studio_sdk/types/s3export_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +import datetime as dt import pydantic +from .s3export_storage_status import S3ExportStorageStatus import typing_extensions -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from .s3export_storage_status import S3ExportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class S3ExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3export_storage_status.py b/src/label_studio_sdk/types/s3export_storage_status.py index b4427e391..c0bddb13a 100644 --- a/src/label_studio_sdk/types/s3export_storage_status.py +++ b/src/label_studio_sdk/types/s3export_storage_status.py @@ -3,5 +3,6 @@ import typing S3ExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/s3import_storage.py b/src/label_studio_sdk/types/s3import_storage.py index 87faf912f..dc713fc85 100644 --- a/src/label_studio_sdk/types/s3import_storage.py +++ b/src/label_studio_sdk/types/s3import_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +import datetime as dt import pydantic +from .s3import_storage_status import S3ImportStorageStatus import typing_extensions -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from .s3import_storage_status import S3ImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class S3ImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3import_storage_status.py b/src/label_studio_sdk/types/s3import_storage_status.py index e77526af8..5a88667a0 100644 --- a/src/label_studio_sdk/types/s3import_storage_status.py +++ b/src/label_studio_sdk/types/s3import_storage_status.py @@ -3,5 +3,6 @@ import typing S3ImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/s3s_export_storage.py b/src/label_studio_sdk/types/s3s_export_storage.py index 8e63ac2a3..d7e1d616f 100644 --- a/src/label_studio_sdk/types/s3s_export_storage.py +++ b/src/label_studio_sdk/types/s3s_export_storage.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic +import datetime as dt import typing_extensions -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class S3SExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3s_import_storage.py b/src/label_studio_sdk/types/s3s_import_storage.py index 610b726a8..1362a231c 100644 --- a/src/label_studio_sdk/types/s3s_import_storage.py +++ b/src/label_studio_sdk/types/s3s_import_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - +import datetime as dt import pydantic +from .s3s_import_storage_status import S3SImportStorageStatus import typing_extensions -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from .s3s_import_storage_status import S3SImportStorageStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class S3SImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3s_import_storage_status.py b/src/label_studio_sdk/types/s3s_import_storage_status.py index f3765ab47..a8b9689a6 100644 --- a/src/label_studio_sdk/types/s3s_import_storage_status.py +++ b/src/label_studio_sdk/types/s3s_import_storage_status.py @@ -3,5 +3,6 @@ import typing S3SImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], + typing.Any, ] diff --git a/src/label_studio_sdk/types/serialization_option.py b/src/label_studio_sdk/types/serialization_option.py index 961b0809a..347950cba 100644 --- a/src/label_studio_sdk/types/serialization_option.py +++ 
b/src/label_studio_sdk/types/serialization_option.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class SerializationOption(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/serialization_options.py b/src/label_studio_sdk/types/serialization_options.py index 08d9f9655..8d6f46bb0 100644 --- a/src/label_studio_sdk/types/serialization_options.py +++ b/src/label_studio_sdk/types/serialization_options.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - +from .serialization_option import SerializationOption import pydantic import typing_extensions -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from .serialization_option import SerializationOption +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class SerializationOptions(UniversalBaseModel): @@ -18,7 +18,8 @@ class SerializationOptions(UniversalBaseModel): """ annotations_completed_by: typing_extensions.Annotated[ - typing.Optional[SerializationOption], FieldMetadata(alias="annotations__completed_by") + typing.Optional[SerializationOption], + FieldMetadata(alias="annotations__completed_by"), ] = None interpolate_key_frames: typing.Optional[bool] = pydantic.Field(default=None) """ diff --git a/src/label_studio_sdk/types/task.py b/src/label_studio_sdk/types/task.py index 7e3d143f2..a0965e4fc 100644 --- a/src/label_studio_sdk/types/task.py +++ b/src/label_studio_sdk/types/task.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .task_annotators_item import TaskAnnotatorsItem +import datetime as dt from .task_comment_authors_item import TaskCommentAuthorsItem +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Task(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/task_filter_options.py b/src/label_studio_sdk/types/task_filter_options.py index 023cce091..04b817727 100644 --- a/src/label_studio_sdk/types/task_filter_options.py +++ b/src/label_studio_sdk/types/task_filter_options.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class TaskFilterOptions(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/user_simple.py b/src/label_studio_sdk/types/user_simple.py index 0d258fbcd..ff7b3e16c 100644 --- a/src/label_studio_sdk/types/user_simple.py +++ b/src/label_studio_sdk/types/user_simple.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.pydantic_utilities import UniversalBaseModel import typing - +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class UserSimple(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/view.py b/src/label_studio_sdk/types/view.py index ea1104e21..1713377c4 100644 --- a/src/label_studio_sdk/types/view.py +++ b/src/label_studio_sdk/types/view.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
+from ..core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .filter_group import FilterGroup +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class View(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/webhook.py b/src/label_studio_sdk/types/webhook.py index ad0fa4264..569486be6 100644 --- a/src/label_studio_sdk/types/webhook.py +++ b/src/label_studio_sdk/types/webhook.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .webhook_actions_item import WebhookActionsItem +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Webhook(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/webhook_serializer_for_update.py b/src/label_studio_sdk/types/webhook_serializer_for_update.py index 782886d5f..832e01de7 100644 --- a/src/label_studio_sdk/types/webhook_serializer_for_update.py +++ b/src/label_studio_sdk/types/webhook_serializer_for_update.py @@ -1,11 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +from .webhook_serializer_for_update_actions_item import ( + WebhookSerializerForUpdateActionsItem, +) +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class WebhookSerializerForUpdate(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/workspace.py b/src/label_studio_sdk/types/workspace.py index bda30027f..e0adc186c 100644 --- a/src/label_studio_sdk/types/workspace.py +++ b/src/label_studio_sdk/types/workspace.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +from ..core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Workspace(UniversalBaseModel): diff --git a/src/label_studio_sdk/users/__init__.py b/src/label_studio_sdk/users/__init__.py index 3c70756d1..92fd561f2 100644 --- a/src/label_studio_sdk/users/__init__.py +++ b/src/label_studio_sdk/users/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import UsersGetTokenResponse, UsersResetTokenResponse __all__ = ["UsersGetTokenResponse", "UsersResetTokenResponse"] diff --git a/src/label_studio_sdk/users/client.py b/src/label_studio_sdk/users/client.py index a6733ce9a..62618163a 100644 --- a/src/label_studio_sdk/users/client.py +++ b/src/label_studio_sdk/users/client.py @@ -1,13 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions -from ..types.base_user import BaseUser -from .raw_client import AsyncRawUsersClient, RawUsersClient -from .types.users_get_token_response import UsersGetTokenResponse from .types.users_reset_token_response import UsersResetTokenResponse +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from .types.users_get_token_response import UsersGetTokenResponse +from ..types.base_user import BaseUser +from ..core.jsonable_encoder import jsonable_encoder +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,18 +18,7 @@ class UsersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawUsersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawUsersClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawUsersClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersResetTokenResponse: """ @@ -51,8 +43,24 @@ def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None ) client.users.reset_token() """ - _response = self._raw_client.reset_token(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/current-user/reset-token/", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersGetTokenResponse: """ @@ -77,8 +85,24 @@ def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) ) client.users.get_token() """ - _response = self._raw_client.get_token(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/current-user/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -103,8 
+127,24 @@ def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> ) client.users.whoami() """ - _response = self._raw_client.whoami(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/current-user/whoami", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ @@ -130,8 +170,24 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.users.list() """ - _response = self._raw_client.list(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/users/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -197,19 +253,39 @@ def create( ) client.users.create() """ - _response = self._raw_client.create( - id=id, - first_name=first_name, - last_name=last_name, - username=username, - email=email, - avatar=avatar, - initials=initials, - phone=phone, - allow_newsletters=allow_newsletters, + _response = self._client_wrapper.httpx_client.request( + "api/users/", + method="POST", + json={ + "id": 
id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -241,8 +317,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -276,8 +368,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + 
request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -351,36 +453,44 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - users_update_request_id=users_update_request_id, - first_name=first_name, - last_name=last_name, - username=username, - email=email, - avatar=avatar, - initials=initials, - phone=phone, - allow_newsletters=allow_newsletters, + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "id": users_update_request_id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncUsersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawUsersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawUsersClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawUsersClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersResetTokenResponse: """ @@ -413,8 +523,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.reset_token(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/reset-token/", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersGetTokenResponse: """ @@ -447,8 +573,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get_token(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -481,8 +623,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = 
await self._raw_client.whoami(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/whoami", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ @@ -516,8 +674,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/users/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -591,19 +765,39 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - id=id, - first_name=first_name, - last_name=last_name, - username=username, - email=email, - avatar=avatar, - initials=initials, - phone=phone, - allow_newsletters=allow_newsletters, + _response = await self._client_wrapper.httpx_client.request( + "api/users/", + method="POST", + json={ + "id": id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": 
initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -643,8 +837,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -686,8 +896,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -769,17 +989,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - users_update_request_id=users_update_request_id, - first_name=first_name, - last_name=last_name, - username=username, - email=email, - avatar=avatar, - initials=initials, - phone=phone, - allow_newsletters=allow_newsletters, + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "id": users_update_request_id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/users/raw_client.py b/src/label_studio_sdk/users/raw_client.py deleted file mode 100644 index 323c86805..000000000 --- a/src/label_studio_sdk/users/raw_client.py +++ /dev/null @@ -1,833 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.base_user import BaseUser -from .types.users_get_token_response import UsersGetTokenResponse -from .types.users_reset_token_response import UsersResetTokenResponse - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawUsersClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def reset_token( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[UsersResetTokenResponse]: - """ - Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[UsersResetTokenResponse] - User token response - """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - UsersResetTokenResponse, - parse_obj_as( - type_=UsersResetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get_token( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[UsersGetTokenResponse]: - """ - Get a access token to authenticate to the API as the current user. To find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[UsersGetTokenResponse] - User token response - """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - UsersGetTokenResponse, - parse_obj_as( - type_=UsersGetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[BaseUser]: - """ - Get information about your user account, such as your username, email, and user ID. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[BaseUser] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/whoami", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[BaseUser]]: - """ - - List all users in your Label Studio organization. 
- - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[BaseUser]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/users/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[BaseUser], - parse_obj_as( - type_=typing.List[BaseUser], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - id: typing.Optional[int] = OMIT, - first_name: typing.Optional[str] = OMIT, - last_name: typing.Optional[str] = OMIT, - username: typing.Optional[str] = OMIT, - email: typing.Optional[str] = OMIT, - avatar: typing.Optional[str] = OMIT, - initials: typing.Optional[str] = OMIT, - phone: typing.Optional[str] = OMIT, - allow_newsletters: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[BaseUser]: - """ - - Create a user in Label Studio. 
- - Parameters - ---------- - id : typing.Optional[int] - User ID - - first_name : typing.Optional[str] - First name of the user - - last_name : typing.Optional[str] - Last name of the user - - username : typing.Optional[str] - Username of the user - - email : typing.Optional[str] - Email of the user - - avatar : typing.Optional[str] - Avatar URL of the user - - initials : typing.Optional[str] - Initials of the user - - phone : typing.Optional[str] - Phone number of the user - - allow_newsletters : typing.Optional[bool] - Whether the user allows newsletters - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[BaseUser] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/users/", - method="POST", - json={ - "id": id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[BaseUser]: - """ - - Get info about a specific Label Studio user. - You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
- - Parameters - ---------- - id : int - User ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[BaseUser] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific Label Studio user. - - You will need to provide their user ID. You can find a list of all user IDs using [List users](list). - - Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. - - Parameters - ---------- - id : int - User ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - users_update_request_id: typing.Optional[int] = OMIT, - first_name: typing.Optional[str] = OMIT, - last_name: typing.Optional[str] = OMIT, - username: typing.Optional[str] = OMIT, - email: typing.Optional[str] = OMIT, - avatar: typing.Optional[str] = OMIT, - initials: typing.Optional[str] = OMIT, - phone: typing.Optional[str] = OMIT, - allow_newsletters: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[BaseUser]: - """ - - Update details for a specific Label Studio user, such as their name or contact information. - - You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
- - Parameters - ---------- - id : int - User ID - - users_update_request_id : typing.Optional[int] - User ID - - first_name : typing.Optional[str] - First name of the user - - last_name : typing.Optional[str] - Last name of the user - - username : typing.Optional[str] - Username of the user - - email : typing.Optional[str] - Email of the user - - avatar : typing.Optional[str] - Avatar URL of the user - - initials : typing.Optional[str] - Initials of the user - - phone : typing.Optional[str] - Phone number of the user - - allow_newsletters : typing.Optional[bool] - Whether the user allows newsletters - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[BaseUser] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "id": users_update_request_id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawUsersClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def reset_token( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[UsersResetTokenResponse]: - 
""" - Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[UsersResetTokenResponse] - User token response - """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - UsersResetTokenResponse, - parse_obj_as( - type_=UsersResetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get_token( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[UsersGetTokenResponse]: - """ - Get a access token to authenticate to the API as the current user. To find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[UsersGetTokenResponse] - User token response - """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - UsersGetTokenResponse, - parse_obj_as( - type_=UsersGetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[BaseUser]: - """ - Get information about your user account, such as your username, email, and user ID. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[BaseUser] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/whoami", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def list( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[BaseUser]]: - """ - - List all users in your Label Studio organization. 
- - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[BaseUser]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/users/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[BaseUser], - parse_obj_as( - type_=typing.List[BaseUser], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - id: typing.Optional[int] = OMIT, - first_name: typing.Optional[str] = OMIT, - last_name: typing.Optional[str] = OMIT, - username: typing.Optional[str] = OMIT, - email: typing.Optional[str] = OMIT, - avatar: typing.Optional[str] = OMIT, - initials: typing.Optional[str] = OMIT, - phone: typing.Optional[str] = OMIT, - allow_newsletters: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[BaseUser]: - """ - - Create a user in Label Studio. 
- - Parameters - ---------- - id : typing.Optional[int] - User ID - - first_name : typing.Optional[str] - First name of the user - - last_name : typing.Optional[str] - Last name of the user - - username : typing.Optional[str] - Username of the user - - email : typing.Optional[str] - Email of the user - - avatar : typing.Optional[str] - Avatar URL of the user - - initials : typing.Optional[str] - Initials of the user - - phone : typing.Optional[str] - Phone number of the user - - allow_newsletters : typing.Optional[bool] - Whether the user allows newsletters - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[BaseUser] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/users/", - method="POST", - json={ - "id": id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[BaseUser]: - """ - - Get info about a specific Label Studio user. - You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
- - Parameters - ---------- - id : int - User ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[BaseUser] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific Label Studio user. - - You will need to provide their user ID. You can find a list of all user IDs using [List users](list). - - Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. - - Parameters - ---------- - id : int - User ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - users_update_request_id: typing.Optional[int] = OMIT, - first_name: typing.Optional[str] = OMIT, - last_name: typing.Optional[str] = OMIT, - username: typing.Optional[str] = OMIT, - email: typing.Optional[str] = OMIT, - avatar: typing.Optional[str] = OMIT, - initials: typing.Optional[str] = OMIT, - phone: typing.Optional[str] = OMIT, - allow_newsletters: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[BaseUser]: - """ - - Update details for a specific Label Studio user, such as their name or contact information. - - You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
- - Parameters - ---------- - id : int - User ID - - users_update_request_id : typing.Optional[int] - User ID - - first_name : typing.Optional[str] - First name of the user - - last_name : typing.Optional[str] - Last name of the user - - username : typing.Optional[str] - Username of the user - - email : typing.Optional[str] - Email of the user - - avatar : typing.Optional[str] - Avatar URL of the user - - initials : typing.Optional[str] - Initials of the user - - phone : typing.Optional[str] - Phone number of the user - - allow_newsletters : typing.Optional[bool] - Whether the user allows newsletters - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[BaseUser] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "id": users_update_request_id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/users/types/__init__.py b/src/label_studio_sdk/users/types/__init__.py index 4aa8d7683..69c55eb33 100644 --- a/src/label_studio_sdk/users/types/__init__.py +++ b/src/label_studio_sdk/users/types/__init__.py @@ -1,7 +1,5 @@ # This 
file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .users_get_token_response import UsersGetTokenResponse from .users_reset_token_response import UsersResetTokenResponse diff --git a/src/label_studio_sdk/users/types/users_get_token_response.py b/src/label_studio_sdk/users/types/users_get_token_response.py index fe19030d1..815096b07 100644 --- a/src/label_studio_sdk/users/types/users_get_token_response.py +++ b/src/label_studio_sdk/users/types/users_get_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class UsersGetTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/users/types/users_reset_token_response.py b/src/label_studio_sdk/users/types/users_reset_token_response.py index a038930ce..0bdf5a8fe 100644 --- a/src/label_studio_sdk/users/types/users_reset_token_response.py +++ b/src/label_studio_sdk/users/types/users_reset_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class UsersResetTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/versions/__init__.py b/src/label_studio_sdk/versions/__init__.py index 64e73bb6c..e3626239c 100644 --- a/src/label_studio_sdk/versions/__init__.py +++ b/src/label_studio_sdk/versions/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .types import VersionsGetResponse, VersionsGetResponseEdition __all__ = ["VersionsGetResponse", "VersionsGetResponseEdition"] diff --git a/src/label_studio_sdk/versions/client.py b/src/label_studio_sdk/versions/client.py index 4584d7665..f9d9df035 100644 --- a/src/label_studio_sdk/versions/client.py +++ b/src/label_studio_sdk/versions/client.py @@ -1,27 +1,18 @@ # This file was auto-generated by Fern from our API Definition. +from ..core.client_wrapper import SyncClientWrapper import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from .raw_client import AsyncRawVersionsClient, RawVersionsClient from .types.versions_get_response import VersionsGetResponse +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper class VersionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawVersionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawVersionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawVersionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> VersionsGetResponse: """ @@ -46,24 +37,29 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ver ) client.versions.get() """ - _response = self._raw_client.get(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/version", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + VersionsGetResponse, + parse_obj_as( + type_=VersionsGetResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncVersionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawVersionsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawVersionsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawVersionsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> VersionsGetResponse: """ @@ -96,5 +92,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/version", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + VersionsGetResponse, + parse_obj_as( + type_=VersionsGetResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/versions/raw_client.py b/src/label_studio_sdk/versions/raw_client.py deleted file mode 100644 index 4ad3ed634..000000000 --- a/src/label_studio_sdk/versions/raw_client.py +++ /dev/null @@ -1,91 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from .types.versions_get_response import VersionsGetResponse - - -class RawVersionsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[VersionsGetResponse]: - """ - Get version information about the Label Studio instance. 
- - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[VersionsGetResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/version", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - VersionsGetResponse, - parse_obj_as( - type_=VersionsGetResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawVersionsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def get( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[VersionsGetResponse]: - """ - Get version information about the Label Studio instance. - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[VersionsGetResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/version", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - VersionsGetResponse, - parse_obj_as( - type_=VersionsGetResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/versions/types/__init__.py b/src/label_studio_sdk/versions/types/__init__.py index a4dddd9ad..852040b2d 100644 --- a/src/label_studio_sdk/versions/types/__init__.py +++ b/src/label_studio_sdk/versions/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .versions_get_response import VersionsGetResponse from .versions_get_response_edition import VersionsGetResponseEdition diff --git a/src/label_studio_sdk/versions/types/versions_get_response.py b/src/label_studio_sdk/versions/types/versions_get_response.py index 3ef46f094..1e3178579 100644 --- a/src/label_studio_sdk/versions/types/versions_get_response.py +++ b/src/label_studio_sdk/versions/types/versions_get_response.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - import pydantic import typing_extensions -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata from .versions_get_response_edition import VersionsGetResponseEdition +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class VersionsGetResponse(UniversalBaseModel): @@ -16,21 +16,24 @@ class VersionsGetResponse(UniversalBaseModel): """ label_studio_os_package: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-os-package") + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], + FieldMetadata(alias="label-studio-os-package"), ] = pydantic.Field(default=None) """ Information about the Label Studio open source package """ label_studio_os_backend: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-os-backend") + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], + FieldMetadata(alias="label-studio-os-backend"), ] = pydantic.Field(default=None) """ Information about the Label Studio backend """ label_studio_frontend: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-frontend") + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], + FieldMetadata(alias="label-studio-frontend"), ] = pydantic.Field(default=None) """ Information about the Label Studio frontend @@ -42,7 +45,8 @@ class VersionsGetResponse(UniversalBaseModel): """ label_studio_converter: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-converter") + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], + FieldMetadata(alias="label-studio-converter"), ] = pydantic.Field(default=None) """ Information about the Label 
Studio converter component diff --git a/src/label_studio_sdk/views/__init__.py b/src/label_studio_sdk/views/__init__.py index 9fc67fadc..498ccadb3 100644 --- a/src/label_studio_sdk/views/__init__.py +++ b/src/label_studio_sdk/views/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import ( ViewsCreateRequestData, ViewsCreateRequestDataFilters, diff --git a/src/label_studio_sdk/views/client.py b/src/label_studio_sdk/views/client.py index 662ad680d..ff5b7e607 100644 --- a/src/label_studio_sdk/views/client.py +++ b/src/label_studio_sdk/views/client.py @@ -1,13 +1,17 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.view import View -from .raw_client import AsyncRawViewsClient, RawViewsClient +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from .types.views_create_request_data import ViewsCreateRequestData +from ..core.serialization import convert_and_respect_annotation_metadata +from ..core.jsonable_encoder import jsonable_encoder from .types.views_update_request_data import ViewsUpdateRequestData +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,21 +19,13 @@ class ViewsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawViewsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawViewsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawViewsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[View]: """ @@ -59,8 +55,27 @@ def list( ) client.views.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -100,8 +115,34 @@ def create( ) client.views.create() """ - _response = self._raw_client.create(data=data, project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="POST", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) def delete_all(self, *, project: int, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -132,8 +173,25 @@ def delete_all(self, *, project: int, request_options: typing.Optional[RequestOp project=1, ) """ - _response = self._raw_client.delete_all(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/reset/", + method="DELETE", + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ @@ -164,8 +222,24 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non id="id", ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -194,8 +268,18 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = id="id", ) """ - _response = 
self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -239,27 +323,45 @@ def update( id="id", ) """ - _response = self._raw_client.update(id, data=data, project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncViewsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawViewsClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawViewsClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawViewsClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[View]: """ @@ -297,8 +399,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -346,8 +467,34 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create(data=data, project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="POST", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete_all(self, *, project: int, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -386,8 +533,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete_all(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/reset/", + method="DELETE", + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ @@ -426,8 +590,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -464,8 +644,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return 
_response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -517,5 +707,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update(id, data=data, project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/views/raw_client.py b/src/label_studio_sdk/views/raw_client.py deleted file mode 100644 index a778be49f..000000000 --- a/src/label_studio_sdk/views/raw_client.py +++ /dev/null @@ -1,574 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..core.serialization import convert_and_respect_annotation_metadata -from ..types.view import View -from .types.views_create_request_data import ViewsCreateRequestData -from .types.views_update_request_data import ViewsUpdateRequestData - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawViewsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[View]]: - """ - - List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. - - You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[View]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[View], - parse_obj_as( - type_=typing.List[View], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - data: typing.Optional[ViewsCreateRequestData] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[View]: - """ - - Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. - - You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). - - Parameters - ---------- - data : typing.Optional[ViewsCreateRequestData] - Custom view data - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[View] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="POST", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsCreateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete_all( - self, *, project: int, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[None]: - """ - - Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. - - You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). - - Parameters - ---------- - project : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/reset/", - method="DELETE", - json={ - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[View]: - """ - - Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). - - Parameters - ---------- - id : str - View ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[View] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - Delete a specific Data Manager view (tab) by ID. You can find the view using [List views](list). 
- - Parameters - ---------- - id : str - View ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: str, - *, - data: typing.Optional[ViewsUpdateRequestData] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[View]: - """ - - You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). - - Parameters - ---------- - id : str - View ID - - data : typing.Optional[ViewsUpdateRequestData] - Custom view data - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[View] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsUpdateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawViewsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[View]]: - """ - - List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. - - You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). - - Parameters - ---------- - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[View]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[View], - parse_obj_as( - type_=typing.List[View], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - data: typing.Optional[ViewsCreateRequestData] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[View]: - """ - - Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. - - You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). - - Parameters - ---------- - data : typing.Optional[ViewsCreateRequestData] - Custom view data - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[View] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="POST", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsCreateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete_all( - self, *, project: int, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. - - You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). - - Parameters - ---------- - project : int - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/reset/", - method="DELETE", - json={ - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[View]: - """ - - Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). - - Parameters - ---------- - id : str - View ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[View] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: str, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - Delete a specific Data Manager view (tab) by ID. 
You can find the view using [List views](list). - - Parameters - ---------- - id : str - View ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: str, - *, - data: typing.Optional[ViewsUpdateRequestData] = OMIT, - project: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[View]: - """ - - You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). - - Parameters - ---------- - id : str - View ID - - data : typing.Optional[ViewsUpdateRequestData] - Custom view data - - project : typing.Optional[int] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[View] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsUpdateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/views/types/__init__.py b/src/label_studio_sdk/views/types/__init__.py index 0404fff5e..56164fc06 100644 --- a/src/label_studio_sdk/views/types/__init__.py +++ b/src/label_studio_sdk/views/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .views_create_request_data import ViewsCreateRequestData from .views_create_request_data_filters import ViewsCreateRequestDataFilters from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction diff --git a/src/label_studio_sdk/views/types/views_create_request_data.py b/src/label_studio_sdk/views/types/views_create_request_data.py index e7020d25a..e5a689a01 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data.py +++ b/src/label_studio_sdk/views/types/views_create_request_data.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .views_create_request_data_filters import ViewsCreateRequestDataFilters +import pydantic from .views_create_request_data_ordering_item import ViewsCreateRequestDataOrderingItem +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsCreateRequestData(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters.py b/src/label_studio_sdk/views/types/views_create_request_data_filters.py index f0ea6bfa6..db954fc98 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters.py @@ -1,11 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import typing - +from ...core.pydantic_utilities import UniversalBaseModel +from .views_create_request_data_filters_conjunction import ( + ViewsCreateRequestDataFiltersConjunction, +) import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction -from .views_create_request_data_filters_items_item import ViewsCreateRequestDataFiltersItemsItem +import typing +from .views_create_request_data_filters_items_item import ( + ViewsCreateRequestDataFiltersItemsItem, +) +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsCreateRequestDataFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py index a7280b1b3..5d9620fa2 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py @@ -1,12 +1,18 @@ # This file 
was auto-generated by Fern from our API Definition. -import typing - +from ...core.pydantic_utilities import UniversalBaseModel +from .views_create_request_data_filters_items_item_filter import ( + ViewsCreateRequestDataFiltersItemsItemFilter, +) import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .views_create_request_data_filters_items_item_filter import ViewsCreateRequestDataFiltersItemsItemFilter -from .views_create_request_data_filters_items_item_operator import ViewsCreateRequestDataFiltersItemsItemOperator -from .views_create_request_data_filters_items_item_value import ViewsCreateRequestDataFiltersItemsItemValue +from .views_create_request_data_filters_items_item_operator import ( + ViewsCreateRequestDataFiltersItemsItemOperator, +) +from .views_create_request_data_filters_items_item_value import ( + ViewsCreateRequestDataFiltersItemsItemValue, +) +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class ViewsCreateRequestDataFiltersItemsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data.py b/src/label_studio_sdk/views/types/views_update_request_data.py index 0b44365a7..1a0ceda3c 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data.py +++ b/src/label_studio_sdk/views/types/views_update_request_data.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
+from ...core.pydantic_utilities import UniversalBaseModel import typing - -import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .views_update_request_data_filters import ViewsUpdateRequestDataFilters +import pydantic from .views_update_request_data_ordering_item import ViewsUpdateRequestDataOrderingItem +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsUpdateRequestData(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters.py b/src/label_studio_sdk/views/types/views_update_request_data_filters.py index f4fc71c12..aa150835b 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters.py @@ -1,11 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import typing - +from ...core.pydantic_utilities import UniversalBaseModel +from .views_update_request_data_filters_conjunction import ( + ViewsUpdateRequestDataFiltersConjunction, +) import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .views_update_request_data_filters_conjunction import ViewsUpdateRequestDataFiltersConjunction -from .views_update_request_data_filters_items_item import ViewsUpdateRequestDataFiltersItemsItem +import typing +from .views_update_request_data_filters_items_item import ( + ViewsUpdateRequestDataFiltersItemsItem, +) +from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsUpdateRequestDataFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py index bbf3aeab9..6936767e2 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py @@ -1,12 +1,18 @@ # This file 
was auto-generated by Fern from our API Definition. -import typing - +from ...core.pydantic_utilities import UniversalBaseModel +from .views_update_request_data_filters_items_item_filter import ( + ViewsUpdateRequestDataFiltersItemsItemFilter, +) import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -from .views_update_request_data_filters_items_item_filter import ViewsUpdateRequestDataFiltersItemsItemFilter -from .views_update_request_data_filters_items_item_operator import ViewsUpdateRequestDataFiltersItemsItemOperator -from .views_update_request_data_filters_items_item_value import ViewsUpdateRequestDataFiltersItemsItemValue +from .views_update_request_data_filters_items_item_operator import ( + ViewsUpdateRequestDataFiltersItemsItemOperator, +) +from .views_update_request_data_filters_items_item_value import ( + ViewsUpdateRequestDataFiltersItemsItemValue, +) +from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing class ViewsUpdateRequestDataFiltersItemsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/webhooks/__init__.py b/src/label_studio_sdk/webhooks/__init__.py index 2a2583ff5..338fdac42 100644 --- a/src/label_studio_sdk/webhooks/__init__.py +++ b/src/label_studio_sdk/webhooks/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import WebhooksUpdateRequestActionsItem __all__ = ["WebhooksUpdateRequestActionsItem"] diff --git a/src/label_studio_sdk/webhooks/client.py b/src/label_studio_sdk/webhooks/client.py index c752a19ad..8fa48c0de 100644 --- a/src/label_studio_sdk/webhooks/client.py +++ b/src/label_studio_sdk/webhooks/client.py @@ -1,16 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper from ..core.request_options import RequestOptions from ..types.webhook import Webhook +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError from ..types.webhook_actions_item import WebhookActionsItem -from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate -from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem -from .raw_client import AsyncRawWebhooksClient, RawWebhooksClient +import datetime as dt +from ..core.jsonable_encoder import jsonable_encoder from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem +from ..types.webhook_serializer_for_update_actions_item import ( + WebhookSerializerForUpdateActionsItem, +) +from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate +from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,21 +23,13 @@ class WebhooksClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawWebhooksClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawWebhooksClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawWebhooksClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( - self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[Webhook]: """ @@ -64,8 +61,27 @@ def list( ) client.webhooks.list() """ - _response = self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -142,21 +158,38 @@ def create( url="url", ) """ - _response = self._raw_client.create( - url=url, - id=id, - organization=organization, - project=project, - send_payload=send_payload, - send_for_all_actions=send_for_all_actions, - headers=headers, - is_active=is_active, - actions=actions, - created_at=created_at, - updated_at=updated_at, + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return 
typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def info( self, @@ -189,8 +222,21 @@ def info( ) client.webhooks.info() """ - _response = self._raw_client.info(organization_only=organization_only, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/info/", + method="GET", + params={ + "organization-only": organization_only, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ @@ -223,8 +269,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -256,8 +318,18 @@ def delete(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -270,7 +342,10 @@ def update( headers: typing.Optional[str] = None, is_active: typing.Optional[bool] = None, actions: typing.Optional[ - typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] + typing.Union[ + WebhooksUpdateRequestActionsItem, + typing.Sequence[WebhooksUpdateRequestActionsItem], + ] ] = None, id: typing.Optional[int] = OMIT, organization: typing.Optional[int] = OMIT, @@ -364,47 +439,57 @@ def update( webhook_serializer_for_update_url="url", ) """ - _response = self._raw_client.update( - id_, - url=url, - webhook_serializer_for_update_url=webhook_serializer_for_update_url, - send_payload=send_payload, - send_for_all_actions=send_for_all_actions, - headers=headers, - is_active=is_active, - actions=actions, - id=id, - organization=organization, - project=project, - webhook_serializer_for_update_send_payload=webhook_serializer_for_update_send_payload, - webhook_serializer_for_update_send_for_all_actions=webhook_serializer_for_update_send_for_all_actions, - webhook_serializer_for_update_headers=webhook_serializer_for_update_headers, - webhook_serializer_for_update_is_active=webhook_serializer_for_update_is_active, - webhook_serializer_for_update_actions=webhook_serializer_for_update_actions, - created_at=created_at, - updated_at=updated_at, + _response = self._client_wrapper.httpx_client.request( + 
f"api/webhooks/{jsonable_encoder(id_)}/", + method="PATCH", + params={ + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + }, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncWebhooksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawWebhooksClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawWebhooksClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawWebhooksClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( - self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + self, + *, + project: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> typing.List[Webhook]: """ @@ -444,8 +529,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(project=project, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -530,21 +634,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - url=url, - id=id, - organization=organization, - project=project, - send_payload=send_payload, - send_for_all_actions=send_for_all_actions, - headers=headers, - is_active=is_active, - actions=actions, - created_at=created_at, - updated_at=updated_at, + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, request_options=request_options, + omit=OMIT, ) - return 
_response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def info( self, @@ -585,8 +706,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.info(organization_only=organization_only, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/info/", + method="GET", + params={ + "organization-only": organization_only, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ @@ -627,8 +761,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: 
""" @@ -668,8 +818,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -682,7 +842,10 @@ async def update( headers: typing.Optional[str] = None, is_active: typing.Optional[bool] = None, actions: typing.Optional[ - typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] + typing.Union[ + WebhooksUpdateRequestActionsItem, + typing.Sequence[WebhooksUpdateRequestActionsItem], + ] ] = None, id: typing.Optional[int] = OMIT, organization: typing.Optional[int] = OMIT, @@ -784,25 +947,43 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id_, - url=url, - webhook_serializer_for_update_url=webhook_serializer_for_update_url, - send_payload=send_payload, - send_for_all_actions=send_for_all_actions, - headers=headers, - is_active=is_active, - actions=actions, - id=id, - organization=organization, - project=project, - webhook_serializer_for_update_send_payload=webhook_serializer_for_update_send_payload, - webhook_serializer_for_update_send_for_all_actions=webhook_serializer_for_update_send_for_all_actions, - webhook_serializer_for_update_headers=webhook_serializer_for_update_headers, - webhook_serializer_for_update_is_active=webhook_serializer_for_update_is_active, - webhook_serializer_for_update_actions=webhook_serializer_for_update_actions, - created_at=created_at, - updated_at=updated_at, + _response = await 
self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id_)}/", + method="PATCH", + params={ + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + }, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/webhooks/raw_client.py b/src/label_studio_sdk/webhooks/raw_client.py deleted file mode 100644 index 0953cf927..000000000 --- a/src/label_studio_sdk/webhooks/raw_client.py +++ /dev/null @@ -1,824 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.webhook import Webhook -from ..types.webhook_actions_item import WebhookActionsItem -from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate -from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem -from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawWebhooksClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[Webhook]]: - """ - - List all webhooks set up for your organization. - - Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. - - For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). - - Parameters - ---------- - project : typing.Optional[str] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[Webhook]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Webhook], - parse_obj_as( - type_=typing.List[Webhook], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - url: str, - id: typing.Optional[int] = OMIT, - organization: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - send_payload: typing.Optional[bool] = OMIT, - send_for_all_actions: typing.Optional[bool] = OMIT, - headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - is_active: typing.Optional[bool] = OMIT, - actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Webhook]: - """ - - Create a webhook. - Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). - - If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). - - Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. 
If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. - - Parameters - ---------- - url : str - URL of webhook - - id : typing.Optional[int] - - organization : typing.Optional[int] - - project : typing.Optional[int] - - send_payload : typing.Optional[bool] - If value is False send only action - - send_for_all_actions : typing.Optional[bool] - If value is False - used only for actions from WebhookAction - - headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Key Value Json of headers - - is_active : typing.Optional[bool] - If value is False the webhook is disabled - - actions : typing.Optional[typing.Sequence[WebhookActionsItem]] - - created_at : typing.Optional[dt.datetime] - Creation time - - updated_at : typing.Optional[dt.datetime] - Last update time - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Webhook] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="POST", - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - 
def info( - self, - *, - organization_only: typing.Optional[bool] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[None]: - """ - - Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - organization_only : typing.Optional[bool] - organization-only or not - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/info/", - method="GET", - params={ - "organization-only": organization_only, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Webhook]: - """ - - Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). - - For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - id : int - A unique integer value identifying this webhook. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Webhook] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). - - For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - id : int - A unique integer value identifying this webhook. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id_: int, - *, - url: str, - webhook_serializer_for_update_url: str, - send_payload: typing.Optional[bool] = None, - send_for_all_actions: typing.Optional[bool] = None, - headers: typing.Optional[str] = None, - is_active: typing.Optional[bool] = None, - actions: typing.Optional[ - typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] - ] = None, - id: typing.Optional[int] = OMIT, - organization: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_actions: typing.Optional[ - typing.Sequence[WebhookSerializerForUpdateActionsItem] - ] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[WebhookSerializerForUpdate]: - """ - - Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). 
- - For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - id_ : int - A unique integer value identifying this webhook. - - url : str - URL of webhook - - webhook_serializer_for_update_url : str - URL of webhook - - send_payload : typing.Optional[bool] - If value is False send only action - - send_for_all_actions : typing.Optional[bool] - If value is False - used only for actions from WebhookAction - - headers : typing.Optional[str] - Key Value Json of headers - - is_active : typing.Optional[bool] - If value is False the webhook is disabled - - actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] - - id : typing.Optional[int] - - organization : typing.Optional[int] - - project : typing.Optional[int] - - webhook_serializer_for_update_send_payload : typing.Optional[bool] - If value is False send only action - - webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] - If value is False - used only for actions from WebhookAction - - webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Key Value Json of headers - - webhook_serializer_for_update_is_active : typing.Optional[bool] - If value is False the webhook is disabled - - webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] - - created_at : typing.Optional[dt.datetime] - Creation time - - updated_at : typing.Optional[dt.datetime] - Last update time - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[WebhookSerializerForUpdate] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id_)}/", - method="PATCH", - params={ - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - }, - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - WebhookSerializerForUpdate, - parse_obj_as( - type_=WebhookSerializerForUpdate, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawWebhooksClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[Webhook]]: - """ - - List all webhooks set up for your organization. - - Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. 
- - For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). - - Parameters - ---------- - project : typing.Optional[str] - Project ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - AsyncHttpResponse[typing.List[Webhook]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Webhook], - parse_obj_as( - type_=typing.List[Webhook], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - url: str, - id: typing.Optional[int] = OMIT, - organization: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - send_payload: typing.Optional[bool] = OMIT, - send_for_all_actions: typing.Optional[bool] = OMIT, - headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - is_active: typing.Optional[bool] = OMIT, - actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Webhook]: - """ - - Create a webhook. - Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). 
- - If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). - - Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. - - Parameters - ---------- - url : str - URL of webhook - - id : typing.Optional[int] - - organization : typing.Optional[int] - - project : typing.Optional[int] - - send_payload : typing.Optional[bool] - If value is False send only action - - send_for_all_actions : typing.Optional[bool] - If value is False - used only for actions from WebhookAction - - headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Key Value Json of headers - - is_active : typing.Optional[bool] - If value is False the webhook is disabled - - actions : typing.Optional[typing.Sequence[WebhookActionsItem]] - - created_at : typing.Optional[dt.datetime] - Creation time - - updated_at : typing.Optional[dt.datetime] - Last update time - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Webhook] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="POST", - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def info( - self, - *, - organization_only: typing.Optional[bool] = None, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[None]: - """ - - Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - organization_only : typing.Optional[bool] - organization-only or not - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/info/", - method="GET", - params={ - "organization-only": organization_only, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Webhook]: - """ - - Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). - - For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - id : int - A unique integer value identifying this webhook. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Webhook] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). - - For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - id : int - A unique integer value identifying this webhook. - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id_: int, - *, - url: str, - webhook_serializer_for_update_url: str, - send_payload: typing.Optional[bool] = None, - send_for_all_actions: typing.Optional[bool] = None, - headers: typing.Optional[str] = None, - is_active: typing.Optional[bool] = None, - actions: typing.Optional[ - typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] - ] = None, - id: typing.Optional[int] = OMIT, - organization: typing.Optional[int] = OMIT, - project: typing.Optional[int] = OMIT, - webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, - webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, - webhook_serializer_for_update_actions: typing.Optional[ - typing.Sequence[WebhookSerializerForUpdateActionsItem] - ] = OMIT, - created_at: typing.Optional[dt.datetime] = OMIT, - updated_at: typing.Optional[dt.datetime] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[WebhookSerializerForUpdate]: - """ - - Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). 
- - For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). - - Parameters - ---------- - id_ : int - A unique integer value identifying this webhook. - - url : str - URL of webhook - - webhook_serializer_for_update_url : str - URL of webhook - - send_payload : typing.Optional[bool] - If value is False send only action - - send_for_all_actions : typing.Optional[bool] - If value is False - used only for actions from WebhookAction - - headers : typing.Optional[str] - Key Value Json of headers - - is_active : typing.Optional[bool] - If value is False the webhook is disabled - - actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] - - id : typing.Optional[int] - - organization : typing.Optional[int] - - project : typing.Optional[int] - - webhook_serializer_for_update_send_payload : typing.Optional[bool] - If value is False send only action - - webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] - If value is False - used only for actions from WebhookAction - - webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Key Value Json of headers - - webhook_serializer_for_update_is_active : typing.Optional[bool] - If value is False the webhook is disabled - - webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] - - created_at : typing.Optional[dt.datetime] - Creation time - - updated_at : typing.Optional[dt.datetime] - Last update time - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[WebhookSerializerForUpdate] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id_)}/", - method="PATCH", - params={ - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - }, - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - WebhookSerializerForUpdate, - parse_obj_as( - type_=WebhookSerializerForUpdate, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/webhooks/types/__init__.py b/src/label_studio_sdk/webhooks/types/__init__.py index 2acf204b9..5c47f8599 100644 --- a/src/label_studio_sdk/webhooks/types/__init__.py +++ b/src/label_studio_sdk/webhooks/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. 
-# isort: skip_file - from .webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem __all__ = ["WebhooksUpdateRequestActionsItem"] diff --git a/src/label_studio_sdk/workspaces/__init__.py b/src/label_studio_sdk/workspaces/__init__.py index 795aaf483..ddc7fa13c 100644 --- a/src/label_studio_sdk/workspaces/__init__.py +++ b/src/label_studio_sdk/workspaces/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from . import members from .members import MembersCreateResponse, MembersListResponseItem diff --git a/src/label_studio_sdk/workspaces/client.py b/src/label_studio_sdk/workspaces/client.py index 2de101cc5..413ac6b3a 100644 --- a/src/label_studio_sdk/workspaces/client.py +++ b/src/label_studio_sdk/workspaces/client.py @@ -1,12 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing - -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.client_wrapper import SyncClientWrapper +from .members.client import MembersClient from ..core.request_options import RequestOptions from ..types.workspace import Workspace -from .members.client import AsyncMembersClient, MembersClient -from .raw_client import AsyncRawWorkspacesClient, RawWorkspacesClient +from ..core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..core.jsonable_encoder import jsonable_encoder +from ..core.client_wrapper import AsyncClientWrapper +from .members.client import AsyncMembersClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -14,19 +18,8 @@ class WorkspacesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawWorkspacesClient(client_wrapper=client_wrapper) - self.members = MembersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawWorkspacesClient: - """ - Retrieves a raw implementation of this client that returns raw responses. - - Returns - ------- - RawWorkspacesClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.members = MembersClient(client_wrapper=self._client_wrapper) def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ @@ -56,8 +49,24 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.workspaces.list() """ - _response = self._raw_client.list(request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -115,16 +124,36 @@ def create( ) client.workspaces.create() """ - _response = self._raw_client.create( - title=title, - description=description, - is_public=is_public, - is_personal=is_personal, - color=color, - is_archived=is_archived, + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": 
"application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: """ @@ -155,8 +184,24 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._raw_client.get(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -186,8 +231,18 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) def update( self, @@ -247,34 +302,42 @@ def update( id=1, ) """ - _response = self._raw_client.update( - id, - title=title, - description=description, - is_public=is_public, - is_personal=is_personal, - color=color, - is_archived=is_archived, + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncWorkspacesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawWorkspacesClient(client_wrapper=client_wrapper) - self.members = AsyncMembersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawWorkspacesClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawWorkspacesClient - """ - return self._raw_client + self._client_wrapper = client_wrapper + self.members = AsyncMembersClient(client_wrapper=self._client_wrapper) async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ @@ -312,8 +375,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -379,16 +458,36 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create( - title=title, - description=description, - is_public=is_public, - is_personal=is_personal, - color=color, - is_archived=is_archived, + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: """ @@ -427,8 +526,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.get(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -466,8 +581,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def update( self, @@ -535,14 +660,33 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.update( - id, - title=title, - description=description, - is_public=is_public, - is_personal=is_personal, - color=color, - is_archived=is_archived, + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + 
method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, request_options=request_options, + omit=OMIT, ) - return _response.data + try: + if 200 <= _response.status_code < 300: + return typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/workspaces/members/__init__.py b/src/label_studio_sdk/workspaces/members/__init__.py index a527383e7..2e3a8f37d 100644 --- a/src/label_studio_sdk/workspaces/members/__init__.py +++ b/src/label_studio_sdk/workspaces/members/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .types import MembersCreateResponse, MembersListResponseItem __all__ = ["MembersCreateResponse", "MembersListResponseItem"] diff --git a/src/label_studio_sdk/workspaces/members/client.py b/src/label_studio_sdk/workspaces/members/client.py index 21e304759..8335b4483 100644 --- a/src/label_studio_sdk/workspaces/members/client.py +++ b/src/label_studio_sdk/workspaces/members/client.py @@ -1,12 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
import typing - -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.client_wrapper import SyncClientWrapper from ...core.request_options import RequestOptions -from .raw_client import AsyncRawMembersClient, RawMembersClient -from .types.members_create_response import MembersCreateResponse from .types.members_list_response_item import MembersListResponseItem +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +from .types.members_create_response import MembersCreateResponse +from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -14,18 +17,7 @@ class MembersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._raw_client = RawMembersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> RawMembersClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - RawMembersClient - """ - return self._raw_client + self._client_wrapper = client_wrapper def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -58,11 +50,31 @@ def list( id=1, ) """ - _response = self._raw_client.list(id, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) def create( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + *, + user: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> MembersCreateResponse: """ @@ -95,11 +107,38 @@ def create( id=1, ) """ - _response = self._raw_client.create(id, user=user, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise 
ApiError(status_code=_response.status_code, body=_response_json) def delete( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + *, + user: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ @@ -131,24 +170,30 @@ def delete( id=1, ) """ - _response = self._raw_client.delete(id, user=user, request_options=request_options) - return _response.data + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) class AsyncMembersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._raw_client = AsyncRawMembersClient(client_wrapper=client_wrapper) - - @property - def with_raw_response(self) -> AsyncRawMembersClient: - """ - Retrieves a raw implementation of this client that returns raw responses. 
- - Returns - ------- - AsyncRawMembersClient - """ - return self._raw_client + self._client_wrapper = client_wrapper async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -189,11 +234,31 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.list(id, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def create( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + *, + user: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> MembersCreateResponse: """ @@ -234,11 +299,38 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.create(id, user=user, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise 
ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) async def delete( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + self, + id: int, + *, + user: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, ) -> None: """ @@ -278,5 +370,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._raw_client.delete(id, user=user, request_options=request_options) - return _response.data + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/workspaces/members/raw_client.py b/src/label_studio_sdk/workspaces/members/raw_client.py deleted file mode 100644 index 0bb64c622..000000000 --- a/src/label_studio_sdk/workspaces/members/raw_client.py +++ /dev/null @@ -1,290 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -from json.decoder import JSONDecodeError - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.http_response import AsyncHttpResponse, HttpResponse -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from ...core.request_options import RequestOptions -from .types.members_create_response import MembersCreateResponse -from .types.members_list_response_item import MembersListResponseItem - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class RawMembersClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[typing.List[MembersListResponseItem]]: - """ - - List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[typing.List[MembersListResponseItem]] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[MembersListResponseItem], - parse_obj_as( - type_=typing.List[MembersListResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[MembersCreateResponse]: - """ - - Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - user : typing.Optional[int] - User ID of the workspace member - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[MembersCreateResponse] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="POST", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MembersCreateResponse, - parse_obj_as( - type_=MembersCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None - ) -> HttpResponse[None]: - """ - - Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). - - Parameters - ---------- - id : int - Workspace ID - - user : typing.Optional[int] - User ID of the workspace member - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="DELETE", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawMembersClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[MembersListResponseItem]]: - """ - - List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[MembersListResponseItem]] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[MembersListResponseItem], - parse_obj_as( - type_=typing.List[MembersListResponseItem], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[MembersCreateResponse]: - """ - - Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - user : typing.Optional[int] - User ID of the workspace member - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[MembersCreateResponse] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="POST", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - MembersCreateResponse, - parse_obj_as( - type_=MembersCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). - - Parameters - ---------- - id : int - Workspace ID - - user : typing.Optional[int] - User ID of the workspace member - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="DELETE", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/workspaces/members/types/__init__.py b/src/label_studio_sdk/workspaces/members/types/__init__.py index b1aa23227..b6f51dbc8 100644 --- a/src/label_studio_sdk/workspaces/members/types/__init__.py +++ b/src/label_studio_sdk/workspaces/members/types/__init__.py @@ -1,7 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -# isort: skip_file - from .members_create_response import MembersCreateResponse from .members_list_response_item import MembersListResponseItem diff --git a/src/label_studio_sdk/workspaces/members/types/members_create_response.py b/src/label_studio_sdk/workspaces/members/types/members_create_response.py index 38c11f695..13ed6d9ba 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_create_response.py +++ b/src/label_studio_sdk/workspaces/members/types/members_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
+from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class MembersCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py index 0428f3e9a..d436b704e 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py +++ b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. +from ....core.pydantic_utilities import UniversalBaseModel import typing - import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....core.pydantic_utilities import IS_PYDANTIC_V2 class MembersListResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/workspaces/raw_client.py b/src/label_studio_sdk/workspaces/raw_client.py deleted file mode 100644 index e3dfdb351..000000000 --- a/src/label_studio_sdk/workspaces/raw_client.py +++ /dev/null @@ -1,561 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -from json.decoder import JSONDecodeError - -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ..core.http_response import AsyncHttpResponse, HttpResponse -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from ..core.request_options import RequestOptions -from ..types.workspace import Workspace - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) 
- - -class RawWorkspacesClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[Workspace]]: - """ - - List all workspaces for your organization. - - Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. - - For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[typing.List[Workspace]] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/workspaces", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Workspace], - parse_obj_as( - type_=typing.List[Workspace], # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def create( - self, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - is_public: typing.Optional[bool] = OMIT, - is_personal: typing.Optional[bool] = OMIT, - color: typing.Optional[str] = OMIT, - is_archived: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Workspace]: - """ - - Create a new workspace. - - Workspaces in Label Studio let you organize your projects and users into separate spaces. 
This is useful for managing different teams, departments, or projects within your organization. - - For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). - - Parameters - ---------- - title : typing.Optional[str] - Workspace title - - description : typing.Optional[str] - Workspace description - - is_public : typing.Optional[bool] - Is workspace public - - is_personal : typing.Optional[bool] - Is workspace personal - - color : typing.Optional[str] - Workspace color in HEX format - - is_archived : typing.Optional[bool] - Is workspace archived - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Workspace] - - """ - _response = self._client_wrapper.httpx_client.request( - "api/workspaces", - method="POST", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Workspace]: - """ - - Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). 
- - Parameters - ---------- - id : int - Workspace ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - HttpResponse[Workspace] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: - """ - - Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[None] - """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return HttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - is_public: typing.Optional[bool] = OMIT, - is_personal: typing.Optional[bool] = OMIT, - color: typing.Optional[str] = OMIT, - is_archived: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> HttpResponse[Workspace]: - """ - - Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - title : typing.Optional[str] - Workspace title - - description : typing.Optional[str] - Workspace description - - is_public : typing.Optional[bool] - Is workspace public - - is_personal : typing.Optional[bool] - Is workspace personal - - color : typing.Optional[str] - Workspace color in HEX format - - is_archived : typing.Optional[bool] - Is workspace archived - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - HttpResponse[Workspace] - - """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - return HttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - -class AsyncRawWorkspacesClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list( - self, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[typing.List[Workspace]]: - """ - - List all workspaces for your organization. - - Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. - - For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). - - Parameters - ---------- - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[typing.List[Workspace]] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/workspaces", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - typing.List[Workspace], - parse_obj_as( - type_=typing.List[Workspace], # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def create( - self, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - is_public: typing.Optional[bool] = OMIT, - is_personal: typing.Optional[bool] = OMIT, - color: typing.Optional[str] = OMIT, - is_archived: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Workspace]: - """ - - Create a new workspace. - - Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. - - For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). - - Parameters - ---------- - title : typing.Optional[str] - Workspace title - - description : typing.Optional[str] - Workspace description - - is_public : typing.Optional[bool] - Is workspace public - - is_personal : typing.Optional[bool] - Is workspace personal - - color : typing.Optional[str] - Workspace color in HEX format - - is_archived : typing.Optional[bool] - Is workspace archived - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Workspace] - - """ - _response = await self._client_wrapper.httpx_client.request( - "api/workspaces", - method="POST", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def get( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[Workspace]: - """ - - Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Workspace] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def delete( - self, id: int, *, request_options: typing.Optional[RequestOptions] = None - ) -> AsyncHttpResponse[None]: - """ - - Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[None] - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return AsyncHttpResponse(response=_response, data=None) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) - - async def update( - self, - id: int, - *, - title: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - is_public: typing.Optional[bool] = OMIT, - is_personal: typing.Optional[bool] = OMIT, - color: typing.Optional[str] = OMIT, - is_archived: typing.Optional[bool] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> AsyncHttpResponse[Workspace]: - """ - - Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). - - Parameters - ---------- - id : int - Workspace ID - - title : typing.Optional[str] - Workspace title - - description : typing.Optional[str] - Workspace description - - is_public : typing.Optional[bool] - Is workspace public - - is_personal : typing.Optional[bool] - Is workspace personal - - color : typing.Optional[str] - Workspace color in HEX format - - is_archived : typing.Optional[bool] - Is workspace archived - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. 
- - Returns - ------- - AsyncHttpResponse[Workspace] - - """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - _data = typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - return AsyncHttpResponse(response=_response, data=_data) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) - raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..dedf0c76b --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,22 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +import os +import pytest +from label_studio_sdk import AsyncLabelStudio + + +@pytest.fixture +def client() -> LabelStudio: + return LabelStudio( + api_key=os.getenv("ENV_API_KEY", "api_key"), + base_url=os.getenv("TESTS_BASE_URL", "base_url"), + ) + + +@pytest.fixture +def async_client() -> AsyncLabelStudio: + return AsyncLabelStudio( + api_key=os.getenv("ENV_API_KEY", "api_key"), + base_url=os.getenv("TESTS_BASE_URL", "base_url"), + ) diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py index ab04ce639..73f811f5e 100644 --- a/tests/custom/test_client.py +++ b/tests/custom/test_client.py @@ -4,4 +4,4 @@ # Get started with writing tests with pytest at https://docs.pytest.org @pytest.mark.skip(reason="Unimplemented") def test_client() -> None: - assert True + assert True == True diff --git a/tests/export_storage/__init__.py b/tests/export_storage/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/export_storage/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/tests/export_storage/test_azure.py b/tests/export_storage/test_azure.py new file mode 100644 index 000000000..0b645b804 --- /dev/null +++ b/tests/export_storage/test_azure.py @@ -0,0 +1,251 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "container": "container", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "account_name": "account_name", + "account_key": "account_key", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "container": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "account_name": None, + "account_key": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + }, + ) + response = client.export_storage.azure.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "container": "container", + "prefix": "prefix", + "account_name": "account_name", + "account_key": "account_key", 
+ } + expected_types: typing.Any = { + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "container": None, + "prefix": None, + "account_name": None, + "account_key": None, + } + response = client.export_storage.azure.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "container": "container", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "account_name": "account_name", + "account_key": "account_key", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "container": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "account_name": None, + "account_key": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + 
"created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + response = client.export_storage.azure.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "container": "container", + "prefix": "prefix", + "account_name": "account_name", + "account_key": "account_key", + } + expected_types: typing.Any = { + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "container": None, + "prefix": None, + "account_name": None, + "account_key": None, + } + response = client.export_storage.azure.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "container": "container", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "account_name": "account_name", + "account_key": "account_key", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + 
"last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "container": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "account_name": None, + "account_key": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + response = client.export_storage.azure.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_gcs.py b/tests/export_storage/test_gcs.py new file mode 100644 index 000000000..7c6f7a22b --- /dev/null +++ b/tests/export_storage/test_gcs.py @@ -0,0 +1,251 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "google_application_credentials": None, + "google_project_id": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + }, + ) + response = client.export_storage.gcs.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcs.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + 
"google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + } + expected_types: typing.Any = { + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "google_application_credentials": None, + "google_project_id": None, + } + response = client.export_storage.gcs.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcs.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "google_application_credentials": None, + "google_project_id": None, + "last_sync": "datetime", 
+ "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + response = client.export_storage.gcs.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcs.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + } + expected_types: typing.Any = { + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "google_application_credentials": None, + "google_project_id": None, + } + response = client.export_storage.gcs.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcs.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "bucket": "bucket", + 
"prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "google_application_credentials": None, + "google_project_id": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + response = client.export_storage.gcs.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcs.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_local.py b/tests/export_storage/test_local.py new file mode 100644 index 000000000..f39bae76b --- /dev/null +++ b/tests/export_storage/test_local.py @@ -0,0 +1,225 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + }, + ) + response = client.export_storage.local.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.local.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "title": None, + "description": None, + "project": "integer", + "path": None, + "regex_filter": None, + "use_blob_urls": None, + } + response = client.export_storage.local.create() + 
validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.local.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.local.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.local.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "project": "integer", + } + response = client.export_storage.local.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.local.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to 
avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "title": None, + "description": None, + "project": "integer", + "path": None, + "regex_filter": None, + "use_blob_urls": None, + } + response = client.export_storage.local.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.local.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + 
"project": "integer", + } + response = client.export_storage.local.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.local.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_redis.py b/tests/export_storage/test_redis.py new file mode 100644 index 000000000..32e207ab9 --- /dev/null +++ b/tests/export_storage/test_redis.py @@ -0,0 +1,261 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "db": 1, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "host": None, + "port": None, + "password": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "db": "integer", + "project": "integer", + } + }, + ) + response = client.export_storage.redis.list() + 
validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.redis.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "db": 1, + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + } + expected_types: typing.Any = { + "db": "integer", + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "path": None, + "host": None, + "port": None, + "password": None, + } + response = client.export_storage.redis.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.redis.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + 
"can_delete_objects": True, + "db": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "host": None, + "port": None, + "password": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "db": "integer", + "project": "integer", + } + response = client.export_storage.redis.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.redis.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "db": 1, + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + } + expected_types: typing.Any = { + "db": "integer", + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "path": None, + "host": None, + "port": None, + "password": None, + } + response = client.export_storage.redis.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.redis.update(id=1) + 
validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "db": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "host": None, + "port": None, + "password": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "db": "integer", + "project": "integer", + } + response = client.export_storage.redis.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.redis.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_s3.py b/tests/export_storage/test_s3.py new file mode 100644 index 000000000..ee8909b6d --- /dev/null +++ b/tests/export_storage/test_s3.py @@ -0,0 +1,291 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + }, + ) + response = client.export_storage.s3.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, 
async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + } + expected_types: typing.Any = { + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + } + response = client.export_storage.s3.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": 
"regex_filter", + "use_blob_urls": True, + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + response = client.export_storage.s3.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "can_delete_objects": True, + "title": "title", + "description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": 
"aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + } + expected_types: typing.Any = { + "can_delete_objects": None, + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + } + response = client.export_storage.s3.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "can_delete_objects": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "can_delete_objects": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + 
"use_blob_urls": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + response = client.export_storage.s3.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_s3s.py b/tests/export_storage/test_s3s.py new file mode 100644 index 000000000..f9efabd90 --- /dev/null +++ b/tests/export_storage/test_s3s.py @@ -0,0 +1,175 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + }, + ) + response = client.export_storage.s3s.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: 
typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + response = client.export_storage.s3s.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + response = client.export_storage.s3s.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + 
client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "external_id": "external_id", + "role_arn": "role_arn", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "external_id": None, + "role_arn": None, + "region_name": None, + "s3_endpoint": None, + "project": "integer", + } + response = client.export_storage.s3s.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.s3s.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) diff --git a/tests/import_storage/__init__.py b/tests/import_storage/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/import_storage/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. 
+ diff --git a/tests/import_storage/test_azure.py b/tests/import_storage/test_azure.py new file mode 100644 index 000000000..da52ad2c2 --- /dev/null +++ b/tests/import_storage/test_azure.py @@ -0,0 +1,269 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "container": "container", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "account_name": "account_name", + "account_key": "account_key", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "presign_ttl": 1, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "container": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "account_name": None, + "account_key": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "presign_ttl": "integer", + "project": "integer", + } + }, + ) + response = client.import_storage.azure.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "presign": True, + "presign_ttl": 1, + "title": "title", + "description": "description", + "project": 1, + "container": "container", + "prefix": "prefix", + "account_name": "account_name", + "account_key": "account_key", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "presign": None, + "presign_ttl": "integer", + "title": None, + "description": None, + "project": "integer", + "container": None, + "prefix": None, + "account_name": None, + "account_key": None, + } + response = client.import_storage.azure.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.azure.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "container": "container", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "account_name": "account_name", + "account_key": "account_key", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "presign_ttl": 1, + "project": 1, + 
} + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "container": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "account_name": None, + "account_key": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "presign_ttl": "integer", + "project": "integer", + } + response = client.import_storage.azure.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "presign": True, + "presign_ttl": 1, + "title": "title", + "description": "description", + "project": 1, + "container": "container", + "prefix": "prefix", + "account_name": "account_name", + "account_key": "account_key", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "presign": None, + "presign_ttl": "integer", + "title": None, + "description": None, + "project": "integer", + "container": None, + "prefix": None, + "account_name": None, + "account_key": None, + } + response = client.import_storage.azure.update(id=1) + validate_response(response, expected_response, 
expected_types) + + async_response = await async_client.import_storage.azure.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "container": "container", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "account_name": "account_name", + "account_key": "account_key", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "presign_ttl": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "container": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "account_name": None, + "account_key": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "presign_ttl": "integer", + "project": "integer", + } + response = client.import_storage.azure.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_gcs.py b/tests/import_storage/test_gcs.py new file mode 100644 index 000000000..e44d38d91 --- /dev/null +++ b/tests/import_storage/test_gcs.py @@ -0,0 +1,269 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "presign_ttl": 1, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "google_application_credentials": None, + "google_project_id": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "presign_ttl": "integer", + "project": "integer", + } + }, + ) + response = client.import_storage.gcs.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcs.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "presign": True, + "presign_ttl": 1, + "title": "title", + 
"description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "presign": None, + "presign_ttl": "integer", + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "google_application_credentials": None, + "google_project_id": None, + } + response = client.import_storage.gcs.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcs.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.gcs.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "presign_ttl": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": 
None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "google_application_credentials": None, + "google_project_id": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "presign_ttl": "integer", + "project": "integer", + } + response = client.import_storage.gcs.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcs.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "presign": True, + "presign_ttl": 1, + "title": "title", + "description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "presign": None, + "presign_ttl": "integer", + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "google_application_credentials": None, + "google_project_id": None, + } + response = client.import_storage.gcs.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = 
await async_client.import_storage.gcs.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "presign_ttl": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "google_application_credentials": None, + "google_project_id": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "presign_ttl": "integer", + "project": "integer", + } + response = client.import_storage.gcs.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcs.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_local.py b/tests/import_storage/test_local.py new file mode 100644 index 000000000..75f0bff65 --- /dev/null +++ b/tests/import_storage/test_local.py @@ -0,0 +1,219 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "project": "integer", + } + }, + ) + response = client.import_storage.local.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.local.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "title": None, + "description": None, + "project": "integer", + "path": None, + "regex_filter": None, + "use_blob_urls": None, + } + response = client.import_storage.local.create() + validate_response(response, expected_response, expected_types) + + 
async_response = await async_client.import_storage.local.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.local.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.local.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "project": "integer", + } + response = client.import_storage.local.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.local.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.local.delete(id=1) 
# type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.local.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "title": None, + "description": None, + "project": "integer", + "path": None, + "regex_filter": None, + "use_blob_urls": None, + } + response = client.import_storage.local.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.local.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "project": "integer", + } + response = client.import_storage.local.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await 
async_client.import_storage.local.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_redis.py b/tests/import_storage/test_redis.py new file mode 100644 index 000000000..01cd0696e --- /dev/null +++ b/tests/import_storage/test_redis.py @@ -0,0 +1,255 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "db": 1, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "host": None, + "port": None, + "password": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "db": "integer", + "project": "integer", + } + }, + ) + response = client.import_storage.redis.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.redis.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "title": None, + "description": None, + "project": "integer", + "path": None, + "host": None, + "port": None, + "password": None, + } + response = client.import_storage.redis.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.redis.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.redis.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "db": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "host": None, + "port": None, + "password": None, + "regex_filter": None, + 
"use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "db": "integer", + "project": "integer", + } + response = client.import_storage.redis.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.redis.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "title": "title", + "description": "description", + "project": 1, + "path": "path", + "host": "host", + "port": "port", + "password": "password", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "title": None, + "description": None, + "project": "integer", + "path": None, + "host": None, + "port": None, + "password": None, + } + response = client.import_storage.redis.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.redis.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "path": "path", + "host": "host", + 
"port": "port", + "password": "password", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "db": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "path": None, + "host": None, + "port": None, + "password": None, + "regex_filter": None, + "use_blob_urls": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "db": "integer", + "project": "integer", + } + response = client.import_storage.redis.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.redis.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_s3.py b/tests/import_storage/test_s3.py new file mode 100644 index 000000000..3b706703d --- /dev/null +++ b/tests/import_storage/test_s3.py @@ -0,0 +1,319 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + }, + ) + response = client.import_storage.s3.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3.list() + validate_response(async_response, 
expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "presign": True, + "presign_ttl": 1, + "recursive_scan": True, + "title": "title", + "description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "presign": None, + "presign_ttl": "integer", + "recursive_scan": None, + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + } + response = client.import_storage.s3.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.s3.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": 
"last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: 
AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "regex_filter": "regex_filter", + "use_blob_urls": True, + "presign": True, + "presign_ttl": 1, + "recursive_scan": True, + "title": "title", + "description": "description", + "project": 1, + "bucket": "bucket", + "prefix": "prefix", + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + "region_name": "region_name", + "s3_endpoint": "s3_endpoint", + } + expected_types: typing.Any = { + "regex_filter": None, + "use_blob_urls": None, + "presign": None, + "presign_ttl": "integer", + "recursive_scan": None, + "title": None, + "description": None, + "project": "integer", + "bucket": None, + "prefix": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + } + response = client.import_storage.s3.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "type": "type", + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "aws_access_key_id": "aws_access_key_id", + "aws_secret_access_key": "aws_secret_access_key", + "aws_session_token": "aws_session_token", + "aws_sse_kms_key_id": "aws_sse_kms_key_id", + 
"region_name": "region_name", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "type": None, + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "aws_access_key_id": None, + "aws_secret_access_key": None, + "aws_session_token": None, + "aws_sse_kms_key_id": None, + "region_name": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_s3s.py b/tests/import_storage/test_s3s.py new file mode 100644 index 000000000..96ba46e7a --- /dev/null +++ b/tests/import_storage/test_s3s.py @@ -0,0 +1,329 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + }, + ) + response = client.import_storage.s3s.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": 
"2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": 
"external_id", + "role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + 
"role_arn": "role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.s3s.validate() # type: ignore[func-returns-value] + is None + ) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "synchronizable": True, + "presign": True, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "status": "initialized", + "traceback": "traceback", + "meta": {"key": "value"}, + "title": "title", + "description": "description", + "created_at": "2024-01-15T09:30:00Z", + "bucket": "bucket", + "prefix": "prefix", + "regex_filter": "regex_filter", + "use_blob_urls": True, + "region_name": "region_name", + "external_id": "external_id", + "role_arn": 
"role_arn", + "s3_endpoint": "s3_endpoint", + "presign_ttl": 1, + "recursive_scan": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "synchronizable": None, + "presign": None, + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "status": None, + "traceback": None, + "meta": ("dict", {0: (None, None)}), + "title": None, + "description": None, + "created_at": "datetime", + "bucket": None, + "prefix": None, + "regex_filter": None, + "use_blob_urls": None, + "region_name": None, + "external_id": None, + "role_arn": None, + "s3_endpoint": None, + "presign_ttl": "integer", + "recursive_scan": None, + "project": "integer", + } + response = client.import_storage.s3s.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.s3s.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/projects/__init__.py b/tests/projects/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/projects/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/tests/projects/test_exports.py b/tests/projects/test_exports.py new file mode 100644 index 000000000..42b059812 --- /dev/null +++ b/tests/projects/test_exports.py @@ -0,0 +1,265 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_formats(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "name": "JSON", + "title": "title", + "description": "description", + "link": "link", + "tags": ["tags"], + "disabled": True, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "name": None, + "title": None, + "description": None, + "link": None, + "tags": ("list", {0: None}), + "disabled": None, + } + }, + ) + response = client.projects.exports.list_formats(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.exports.list_formats(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "title": "title", + "id": 1, + "created_by": { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "email": "email", + "avatar": "avatar", + }, + "created_at": "2024-01-15T09:30:00Z", + "finished_at": "2024-01-15T09:30:00Z", + "status": "created", + "md5": "md5", + "counters": {"key": "value"}, + "converted_formats": [{"export_type": "export_type"}], + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "title": None, + "id": "integer", + "created_by": { + "id": "integer", + "first_name": None, + "last_name": None, + "email": None, + "avatar": None, + }, + "created_at": "datetime", + "finished_at": "datetime", + "status": None, + "md5": None, + "counters": ("dict", {0: (None, None)}), + "converted_formats": ("list", {0: {"export_type": None}}), + } + }, + ) + response = client.projects.exports.list(project_id=1) + validate_response(response, expected_response, expected_types) + + async_response = 
await async_client.projects.exports.list(project_id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "id": 1, + "created_by": { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "email": "email", + "avatar": "avatar", + }, + "created_at": "2024-01-15T09:30:00Z", + "finished_at": "2024-01-15T09:30:00Z", + "status": "created", + "md5": "md5", + "counters": {"key": "value"}, + "converted_formats": [ + { + "id": 1, + "status": "created", + "export_type": "export_type", + "traceback": "traceback", + } + ], + "task_filter_options": { + "view": 1, + "skipped": "skipped", + "finished": "finished", + "annotated": "annotated", + "only_with_annotations": True, + }, + "annotation_filter_options": { + "usual": True, + "ground_truth": True, + "skipped": True, + }, + "serialization_options": { + "drafts": {"only_id": True}, + "predictions": {"only_id": True}, + "include_annotation_history": True, + "annotations__completed_by": {"only_id": True}, + "interpolate_key_frames": True, + }, + } + expected_types: typing.Any = { + "title": None, + "id": "integer", + "created_by": { + "id": "integer", + "first_name": None, + "last_name": None, + "email": None, + "avatar": None, + }, + "created_at": "datetime", + "finished_at": "datetime", + "status": None, + "md5": None, + "counters": ("dict", {0: (None, None)}), + "converted_formats": ( + "list", + { + 0: { + "id": "integer", + "status": None, + "export_type": None, + "traceback": None, + } + }, + ), + "task_filter_options": { + "view": "integer", + "skipped": None, + "finished": None, + "annotated": None, + "only_with_annotations": None, + }, + "annotation_filter_options": { + "usual": None, + "ground_truth": None, + "skipped": None, + }, + "serialization_options": { + "drafts": {"only_id": None}, + "predictions": {"only_id": None}, + 
"include_annotation_history": None, + "annotations__completed_by": {"only_id": None}, + "interpolate_key_frames": None, + }, + } + response = client.projects.exports.create(project_id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.exports.create(project_id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "id": 1, + "created_by": { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "email": "email", + "avatar": "avatar", + }, + "created_at": "2024-01-15T09:30:00Z", + "finished_at": "2024-01-15T09:30:00Z", + "status": "created", + "md5": "md5", + "counters": {"key": "value"}, + "converted_formats": [ + { + "id": 1, + "status": "created", + "export_type": "export_type", + "traceback": "traceback", + } + ], + } + expected_types: typing.Any = { + "title": None, + "id": "integer", + "created_by": { + "id": "integer", + "first_name": None, + "last_name": None, + "email": None, + "avatar": None, + }, + "created_at": "datetime", + "finished_at": "datetime", + "status": None, + "md5": None, + "counters": ("dict", {0: (None, None)}), + "converted_formats": ( + "list", + { + 0: { + "id": "integer", + "status": None, + "export_type": None, + "traceback": None, + } + }, + ), + } + response = client.projects.exports.get(project_id=1, export_pk="export_pk") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.exports.get(project_id=1, export_pk="export_pk") + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.projects.exports.delete(project_id=1, 
export_pk="export_pk") # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.projects.exports.delete(project_id=1, export_pk="export_pk") # type: ignore[func-returns-value] + is None + ) + + +async def test_convert(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"export_type": "JSON", "converted_format": 1} + expected_types: typing.Any = {"export_type": None, "converted_format": "integer"} + response = client.projects.exports.convert(project_id=1, export_pk="export_pk") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.exports.convert(project_id=1, export_pk="export_pk") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/projects/test_pauses.py b/tests/projects/test_pauses.py new file mode 100644 index 000000000..5bf4ad0c3 --- /dev/null +++ b/tests/projects/test_pauses.py @@ -0,0 +1,154 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "project": 1, + "user": 1, + "paused_by": 1, + "reason": "reason", + "verbose_reason": "verbose_reason", + "deleted_by": 1, + "deleted_at": "2024-01-15T09:30:00Z", + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "project": "integer", + "user": "integer", + "paused_by": "integer", + "reason": None, + "verbose_reason": None, + "deleted_by": "integer", + "deleted_at": "datetime", + "created_at": "datetime", + "updated_at": "datetime", + } + }, + ) + response = client.projects.pauses.list(project_pk=1, user_pk=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.pauses.list(project_pk=1, user_pk=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "project": 1, + "user": 1, + "paused_by": 1, + "reason": "reason", + "verbose_reason": "verbose_reason", + "deleted_by": 1, + "deleted_at": "2024-01-15T09:30:00Z", + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "project": "integer", + "user": "integer", + "paused_by": "integer", + "reason": None, + "verbose_reason": None, + "deleted_by": "integer", + "deleted_at": "datetime", + "created_at": "datetime", + "updated_at": "datetime", + } + response = client.projects.pauses.create(project_pk=1, user_pk=1, reason="reason") + validate_response(response, expected_response, expected_types) + + 
async_response = await async_client.projects.pauses.create(project_pk=1, user_pk=1, reason="reason") + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "project": 1, + "user": 1, + "paused_by": 1, + "reason": "reason", + "verbose_reason": "verbose_reason", + "deleted_by": 1, + "deleted_at": "2024-01-15T09:30:00Z", + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "project": "integer", + "user": "integer", + "paused_by": "integer", + "reason": None, + "verbose_reason": None, + "deleted_by": "integer", + "deleted_at": "datetime", + "created_at": "datetime", + "updated_at": "datetime", + } + response = client.projects.pauses.get(project_pk=1, user_pk=1, id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.pauses.get(project_pk=1, user_pk=1, id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.projects.pauses.delete(project_pk=1, user_pk=1, id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.projects.pauses.delete(project_pk=1, user_pk=1, id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "project": 1, + "user": 1, + "paused_by": 1, + "reason": "reason", + "verbose_reason": "verbose_reason", + "deleted_by": 1, + "deleted_at": "2024-01-15T09:30:00Z", + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + 
"id": "integer", + "project": "integer", + "user": "integer", + "paused_by": "integer", + "reason": None, + "verbose_reason": None, + "deleted_by": "integer", + "deleted_at": "datetime", + "created_at": "datetime", + "updated_at": "datetime", + } + response = client.projects.pauses.update(project_pk=1, user_pk=1, id=1, reason="reason") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.pauses.update(project_pk=1, user_pk=1, id=1, reason="reason") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/__init__.py b/tests/prompts/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/prompts/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/tests/prompts/test_indicators.py b/tests/prompts/test_indicators.py new file mode 100644 index 000000000..8167c72e1 --- /dev/null +++ b/tests/prompts/test_indicators.py @@ -0,0 +1,47 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "key": "key", + "title": "title", + "main_kpi": "main_kpi", + "secondary_kpi": "secondary_kpi", + "additional_kpis": [{}], + "extra_kpis": [{}], + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "key": None, + "title": None, + "main_kpi": None, + "secondary_kpi": None, + "additional_kpis": ("list", {0: {}}), + "extra_kpis": ("list", {0: {}}), + } + }, + ) + response = client.prompts.indicators.list(pk=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.indicators.list(pk=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"title": "title", "values": {"key": "value"}} + expected_types: typing.Any = {"title": None, "values": ("dict", {0: (None, None)})} + response = client.prompts.indicators.get(indicator_key="indicator_key", pk=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.indicators.get(indicator_key="indicator_key", pk=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/test_runs.py b/tests/prompts/test_runs.py new file mode 100644 index 000000000..a3fe40ce5 --- /dev/null +++ b/tests/prompts/test_runs.py @@ -0,0 +1,74 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "organization": 1, + "project": 1, + "model_version": 1, + "created_by": 1, + "project_subset": "All", + "status": "Pending", + "job_id": "job_id", + "created_at": "2024-01-15T09:30:00Z", + "triggered_at": "2024-01-15T09:30:00Z", + "predictions_updated_at": "2024-01-15T09:30:00Z", + "completed_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "organization": "integer", + "project": "integer", + "model_version": "integer", + "created_by": "integer", + "project_subset": None, + "status": None, + "job_id": None, + "created_at": "datetime", + "triggered_at": "datetime", + "predictions_updated_at": "datetime", + "completed_at": "datetime", + } + response = client.prompts.runs.list(id=1, version_id=1, project=1, project_subset="All") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.runs.list(id=1, version_id=1, project=1, project_subset="All") + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "organization": 1, + "project": 1, + "model_version": 1, + "created_by": 1, + "project_subset": "All", + "status": "Pending", + "job_id": "job_id", + "created_at": "2024-01-15T09:30:00Z", + "triggered_at": "2024-01-15T09:30:00Z", + "predictions_updated_at": "2024-01-15T09:30:00Z", + "completed_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "organization": "integer", + "project": "integer", + "model_version": "integer", + "created_by": "integer", + "project_subset": None, + "status": None, + "job_id": None, + "created_at": "datetime", + "triggered_at": "datetime", + 
"predictions_updated_at": "datetime", + "completed_at": "datetime", + } + response = client.prompts.runs.create(id=1, version_id=1, project=1, project_subset="All") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.runs.create(id=1, version_id=1, project=1, project_subset="All") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/test_versions.py b/tests/prompts/test_versions.py new file mode 100644 index 000000000..584a55e84 --- /dev/null +++ b/tests/prompts/test_versions.py @@ -0,0 +1,280 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "title": "title", + "parent_model": 1, + "model_provider_connection": 1, + "prompt": "prompt", + "provider": "OpenAI", + "provider_model_id": "provider_model_id", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "title": None, + "parent_model": "integer", + "model_provider_connection": "integer", + "prompt": None, + "provider": None, + "provider_model_id": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + } + }, + ) + response = client.prompts.versions.list(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.list(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", 
+ "parent_model": 1, + "model_provider_connection": 1, + "prompt": "prompt", + "provider": "OpenAI", + "provider_model_id": "provider_model_id", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + } + expected_types: typing.Any = { + "title": None, + "parent_model": "integer", + "model_provider_connection": "integer", + "prompt": None, + "provider": None, + "provider_model_id": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + } + response = client.prompts.versions.create(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.create(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "parent_model": 1, + "model_provider_connection": 1, + "prompt": "prompt", + "provider": "OpenAI", + "provider_model_id": "provider_model_id", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + } + expected_types: typing.Any = { + "title": None, + "parent_model": "integer", + "model_provider_connection": "integer", + "prompt": None, + "provider": None, + "provider_model_id": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + } + response = client.prompts.versions.get(id=1, version_id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.get(id=1, version_id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a 
value + assert ( + client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "parent_model": 1, + "model_provider_connection": 1, + "prompt": "prompt", + "provider": "OpenAI", + "provider_model_id": "provider_model_id", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + } + expected_types: typing.Any = { + "title": None, + "parent_model": "integer", + "model_provider_connection": "integer", + "prompt": None, + "provider": None, + "provider_model_id": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + } + response = client.prompts.versions.update(id=1, version_id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.update(id=1, version_id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_cost_estimate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "prompt_cost_usd": "prompt_cost_usd", + "completion_cost_usd": "completion_cost_usd", + "total_cost_usd": "total_cost_usd", + "is_error": True, + "error_type": "error_type", + "error_message": "error_message", + } + expected_types: typing.Any = { + "prompt_cost_usd": None, + "completion_cost_usd": None, + "total_cost_usd": None, + "is_error": None, + "error_type": None, + "error_message": None, + } + response = client.prompts.versions.cost_estimate(prompt_id=1, version_id=1, project_id=1, project_subset=1) + validate_response(response, expected_response, expected_types) + + async_response = await 
async_client.prompts.versions.cost_estimate( + prompt_id=1, version_id=1, project_id=1, project_subset=1 + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_get_refined_prompt(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "reasoning": "reasoning", + "prompt": "prompt", + "refinement_job_id": "refinement_job_id", + "refinement_status": "Pending", + "total_cost": "total_cost", + "previous_version": { + "title": "title", + "parent_model": 1, + "model_provider_connection": 1, + "prompt": "prompt", + "provider": "OpenAI", + "provider_model_id": "provider_model_id", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + }, + } + expected_types: typing.Any = { + "title": None, + "reasoning": None, + "prompt": None, + "refinement_job_id": None, + "refinement_status": None, + "total_cost": None, + "previous_version": { + "title": None, + "parent_model": "integer", + "model_provider_connection": "integer", + "prompt": None, + "provider": None, + "provider_model_id": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + }, + } + response = client.prompts.versions.get_refined_prompt( + prompt_id=1, version_id=1, refinement_job_id="refinement_job_id" + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.get_refined_prompt( + prompt_id=1, version_id=1, refinement_job_id="refinement_job_id" + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_refine_prompt(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "reasoning": "reasoning", + "prompt": "prompt", + "refinement_job_id": "refinement_job_id", + "refinement_status": "Pending", + "total_cost": 
"total_cost", + "previous_version": { + "title": "title", + "parent_model": 1, + "model_provider_connection": 1, + "prompt": "prompt", + "provider": "OpenAI", + "provider_model_id": "provider_model_id", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + }, + } + expected_types: typing.Any = { + "title": None, + "reasoning": None, + "prompt": None, + "refinement_job_id": None, + "refinement_status": None, + "total_cost": None, + "previous_version": { + "title": None, + "parent_model": "integer", + "model_provider_connection": "integer", + "prompt": None, + "provider": None, + "provider_model_id": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + }, + } + response = client.prompts.versions.refine_prompt(prompt_id=1, version_id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.versions.refine_prompt(prompt_id=1, version_id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_actions.py b/tests/test_actions.py new file mode 100644 index 000000000..e34997d2b --- /dev/null +++ b/tests/test_actions.py @@ -0,0 +1,65 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +from label_studio_sdk.actions import ActionsCreateRequestFilters +from label_studio_sdk.actions import ActionsCreateRequestFiltersItemsItem +from label_studio_sdk.actions import ActionsCreateRequestSelectedItemsExcluded + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.actions.list() # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.actions.list() # type: ignore[func-returns-value] + is None + ) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.actions.create( + id="retrieve_tasks_predictions", + project=1, + filters=ActionsCreateRequestFilters( + conjunction="or", + items=[ + ActionsCreateRequestFiltersItemsItem( + filter="filter:tasks:id", + operator="greater", + type="Number", + value=123, + ) + ], + ), + selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), + ordering=["tasks:total_annotations"], + ) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.actions.create( + id="retrieve_tasks_predictions", + project=1, + filters=ActionsCreateRequestFilters( + conjunction="or", + items=[ + ActionsCreateRequestFiltersItemsItem( + filter="filter:tasks:id", + operator="greater", + type="Number", + value=123, + ) + ], + ), + selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), + ordering=["tasks:total_annotations"], + ) # type: ignore[func-returns-value] + is None + ) diff --git a/tests/test_annotations.py b/tests/test_annotations.py new file mode 100644 index 000000000..43493c8a3 --- /dev/null +++ b/tests/test_annotations.py @@ -0,0 
+1,467 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "created_username": "created_username", + "created_ago": "created_ago", + "completed_by": 1, + "unique_id": "unique_id", + "was_cancelled": False, + "ground_truth": False, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "draft_created_at": "2024-01-15T09:30:00Z", + "lead_time": 10, + "import_id": 1, + "last_action": "prediction", + "task": 1, + "project": 1, + "updated_by": 1, + "parent_prediction": 1, + "parent_annotation": 1, + "last_created_by": 1, + } + expected_types: typing.Any = { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "created_username": None, + "created_ago": None, + "completed_by": "integer", + "unique_id": None, + "was_cancelled": None, + "ground_truth": None, + "created_at": "datetime", + "updated_at": "datetime", + "draft_created_at": "datetime", + "lead_time": None, + "import_id": "integer", + "last_action": None, + "task": "integer", + "project": "integer", + "updated_by": "integer", + "parent_prediction": "integer", + "parent_annotation": "integer", + "last_created_by": "integer", + } + response = client.annotations.get(id=1) + validate_response(response, expected_response, 
expected_types) + + async_response = await async_client.annotations.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.annotations.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.annotations.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "created_username": "created_username", + "created_ago": "created_ago", + "completed_by": 1, + "unique_id": "unique_id", + "was_cancelled": False, + "ground_truth": False, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "draft_created_at": "2024-01-15T09:30:00Z", + "lead_time": 10, + "import_id": 1, + "last_action": "prediction", + "task": 1, + "project": 1, + "updated_by": 1, + "parent_prediction": 1, + "parent_annotation": 1, + "last_created_by": 1, + } + expected_types: typing.Any = { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "created_username": None, + "created_ago": None, + "completed_by": "integer", + "unique_id": None, + "was_cancelled": None, + "ground_truth": None, + "created_at": "datetime", + "updated_at": "datetime", + "draft_created_at": "datetime", + "lead_time": None, + 
"import_id": "integer", + "last_action": None, + "task": "integer", + "project": "integer", + "updated_by": "integer", + "parent_prediction": "integer", + "parent_annotation": "integer", + "last_created_by": "integer", + } + response = client.annotations.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.annotations.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "created_username": "created_username", + "created_ago": "created_ago", + "completed_by": 1, + "unique_id": "unique_id", + "was_cancelled": False, + "ground_truth": False, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "draft_created_at": "2024-01-15T09:30:00Z", + "lead_time": 10, + "import_id": 1, + "last_action": 
"prediction", + "task": 1, + "project": 1, + "updated_by": 1, + "parent_prediction": 1, + "parent_annotation": 1, + "last_created_by": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "created_username": None, + "created_ago": None, + "completed_by": "integer", + "unique_id": None, + "was_cancelled": None, + "ground_truth": None, + "created_at": "datetime", + "updated_at": "datetime", + "draft_created_at": "datetime", + "lead_time": None, + "import_id": "integer", + "last_action": None, + "task": "integer", + "project": "integer", + "updated_by": "integer", + "parent_prediction": "integer", + "parent_annotation": "integer", + "last_created_by": "integer", + } + }, + ) + response = client.annotations.list(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.annotations.list(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "created_username": "created_username", + "created_ago": "created_ago", + "completed_by": 1, + "unique_id": "unique_id", + "was_cancelled": False, + "ground_truth": False, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "draft_created_at": "2024-01-15T09:30:00Z", + "lead_time": 10, + "import_id": 1, + "last_action": "prediction", 
+ "task": 1, + "project": 1, + "updated_by": 1, + "parent_prediction": 1, + "parent_annotation": 1, + "last_created_by": 1, + } + expected_types: typing.Any = { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "created_username": None, + "created_ago": None, + "completed_by": "integer", + "unique_id": None, + "was_cancelled": None, + "ground_truth": None, + "created_at": "datetime", + "updated_at": "datetime", + "draft_created_at": "datetime", + "lead_time": None, + "import_id": "integer", + "last_action": None, + "task": "integer", + "project": "integer", + "updated_by": "integer", + "parent_prediction": "integer", + "parent_annotation": "integer", + "last_created_by": "integer", + } + response = client.annotations.create( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.annotations.create( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + was_cancelled=False, + ground_truth=True, + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_create_bulk(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [{"id": 1}] + expected_types: 
typing.Tuple[typing.Any, typing.Any] = ( + "list", + {0: {"id": "integer"}}, + ) + response = client.annotations.create_bulk() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.annotations.create_bulk() + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_comments.py b/tests/test_comments.py new file mode 100644 index 000000000..509abf4ee --- /dev/null +++ b/tests/test_comments.py @@ -0,0 +1,154 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "text": "text", + "project": 1, + "task": 1, + "annotation": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_resolved": True, + "resolved_at": "2024-01-15T09:30:00Z", + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "text": None, + "project": "integer", + "task": "integer", + "annotation": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "is_resolved": None, + "resolved_at": "datetime", + } + }, + ) + response = client.comments.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.comments.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "text": "text", + "project": 1, + "task": 1, + "annotation": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_resolved": True, + "resolved_at": 
"2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "text": None, + "project": "integer", + "task": "integer", + "annotation": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "is_resolved": None, + "resolved_at": "datetime", + } + response = client.comments.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.comments.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "text": "text", + "project": 1, + "task": 1, + "annotation": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_resolved": True, + "resolved_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "text": None, + "project": "integer", + "task": "integer", + "annotation": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "is_resolved": None, + "resolved_at": "datetime", + } + response = client.comments.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.comments.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.comments.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.comments.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "text": "text", + "project": 1, + "task": 1, + "annotation": 1, 
+ "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_resolved": True, + "resolved_at": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "text": None, + "project": "integer", + "task": "integer", + "annotation": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "is_resolved": None, + "resolved_at": "datetime", + } + response = client.comments.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.comments.update(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_export_storage.py b/tests/test_export_storage.py new file mode 100644 index 000000000..b15334bac --- /dev/null +++ b/tests/test_export_storage.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [{"name": "name", "title": "title"}] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + {0: {"name": None, "title": None}}, + ) + response = client.export_storage.list_types() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.list_types() + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 000000000..7bf2e7fde --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,78 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"id": 1, "file": "file"} + expected_types: typing.Any = {"id": "integer", "file": None} + response = client.files.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.files.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.files.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.files.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"id": 1, "file": "file"} + expected_types: typing.Any = {"id": "integer", "file": None} + response = client.files.update(id_=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.files.update(id_=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [{"id": 1, "file": "file"}] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + {0: {"id": "integer", "file": None}}, + ) + response = client.files.list(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.files.list(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete_many(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # 
Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.files.delete_many(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.files.delete_many(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_download(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.files.download(filename="filename") # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.files.download(filename="filename") # type: ignore[func-returns-value] + is None + ) diff --git a/tests/test_import_storage.py b/tests/test_import_storage.py new file mode 100644 index 000000000..1a4f25768 --- /dev/null +++ b/tests/test_import_storage.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [{"name": "name", "title": "title"}] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + {0: {"name": None, "title": None}}, + ) + response = client.import_storage.list_types() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.list_types() + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_jwt_settings.py b/tests/test_jwt_settings.py new file mode 100644 index 000000000..e17aa3d14 --- /dev/null +++ b/tests/test_jwt_settings.py @@ -0,0 +1,44 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "api_tokens_enabled": True, + "legacy_api_tokens_enabled": True, + "api_token_ttl_days": 1, + } + expected_types: typing.Any = { + "api_tokens_enabled": None, + "legacy_api_tokens_enabled": None, + "api_token_ttl_days": "integer", + } + response = client.jwt_settings.get() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.jwt_settings.get() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "api_tokens_enabled": True, + "legacy_api_tokens_enabled": True, + "api_token_ttl_days": 1, + } + expected_types: typing.Any = { + "api_tokens_enabled": None, + "legacy_api_tokens_enabled": None, + "api_token_ttl_days": "integer", + } + response = client.jwt_settings.create(api_tokens_enabled=True, legacy_api_tokens_enabled=True, api_token_ttl_days=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.jwt_settings.create( + api_tokens_enabled=True, legacy_api_tokens_enabled=True, api_token_ttl_days=1 + ) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_ml.py b/tests/test_ml.py new file mode 100644 index 000000000..0a5badc22 --- /dev/null +++ b/tests/test_ml.py @@ -0,0 +1,229 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "state": "CO", + "readable_state": "readable_state", + "is_interactive": True, + "url": "url", + "error_message": "error_message", + "title": "title", + "auth_method": "NONE", + "basic_auth_user": "basic_auth_user", + "basic_auth_pass": "basic_auth_pass", + "basic_auth_pass_is_set": "basic_auth_pass_is_set", + "description": "description", + "extra_params": {"key": "value"}, + "model_version": "model_version", + "timeout": 1.1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "auto_update": True, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "state": None, + "readable_state": None, + "is_interactive": None, + "url": None, + "error_message": None, + "title": None, + "auth_method": None, + "basic_auth_user": None, + "basic_auth_pass": None, + "basic_auth_pass_is_set": None, + "description": None, + "extra_params": ("dict", {0: (None, None)}), + "model_version": None, + "timeout": None, + "created_at": "datetime", + "updated_at": "datetime", + "auto_update": None, + "project": "integer", + } + }, + ) + response = client.ml.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.ml.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "url": "url", + "project": 1, + "is_interactive": True, + "title": "title", + "description": "description", + "auth_method": "NONE", + "basic_auth_user": "basic_auth_user", + "basic_auth_pass": "basic_auth_pass", + "extra_params": {"key": 
"value"}, + "timeout": 1, + } + expected_types: typing.Any = { + "url": None, + "project": "integer", + "is_interactive": None, + "title": None, + "description": None, + "auth_method": None, + "basic_auth_user": None, + "basic_auth_pass": None, + "extra_params": ("dict", {0: (None, None)}), + "timeout": "integer", + } + response = client.ml.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.ml.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "state": "CO", + "readable_state": "readable_state", + "is_interactive": True, + "url": "url", + "error_message": "error_message", + "title": "title", + "auth_method": "NONE", + "basic_auth_user": "basic_auth_user", + "basic_auth_pass": "basic_auth_pass", + "basic_auth_pass_is_set": "basic_auth_pass_is_set", + "description": "description", + "extra_params": {"key": "value"}, + "model_version": "model_version", + "timeout": 1.1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "auto_update": True, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "state": None, + "readable_state": None, + "is_interactive": None, + "url": None, + "error_message": None, + "title": None, + "auth_method": None, + "basic_auth_user": None, + "basic_auth_pass": None, + "basic_auth_pass_is_set": None, + "description": None, + "extra_params": ("dict", {0: (None, None)}), + "model_version": None, + "timeout": None, + "created_at": "datetime", + "updated_at": "datetime", + "auto_update": None, + "project": "integer", + } + response = client.ml.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.ml.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.ml.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.ml.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "url": "url", + "project": 1, + "is_interactive": True, + "title": "title", + "description": "description", + "auth_method": "NONE", + "basic_auth_user": "basic_auth_user", + "basic_auth_pass": "basic_auth_pass", + "extra_params": {"key": "value"}, + "timeout": 1, + } + expected_types: typing.Any = { + "url": None, + "project": "integer", + "is_interactive": None, + "title": None, + "description": None, + "auth_method": None, + "basic_auth_user": None, + "basic_auth_pass": None, + "extra_params": ("dict", {0: (None, None)}), + "timeout": "integer", + } + response = client.ml.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.ml.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_predict_interactive(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_train(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.ml.train(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.ml.train(id=1) # type: 
ignore[func-returns-value] + is None + ) + + +async def test_list_model_versions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] + is None + ) diff --git a/tests/test_model_providers.py b/tests/test_model_providers.py new file mode 100644 index 000000000..353ad2339 --- /dev/null +++ b/tests/test_model_providers.py @@ -0,0 +1,194 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "provider": "OpenAI", + "api_key": "api_key", + "deployment_name": "deployment_name", + "endpoint": "endpoint", + "scope": "Organization", + "organization": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_internal": True, + "budget_limit": 1.1, + "budget_last_reset_date": "2024-01-15T09:30:00Z", + "budget_reset_period": "Monthly", + "budget_total_spent": 1.1, + "budget_alert_threshold": 1.1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "provider": None, + "api_key": None, + "deployment_name": None, + "endpoint": None, + "scope": None, + "organization": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "is_internal": None, + "budget_limit": None, + "budget_last_reset_date": "datetime", + "budget_reset_period": None, + "budget_total_spent": None, + "budget_alert_threshold": None, + } + }, + ) + response = client.model_providers.list() + 
validate_response(response, expected_response, expected_types) + + async_response = await async_client.model_providers.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "provider": "OpenAI", + "api_key": "api_key", + "deployment_name": "deployment_name", + "endpoint": "endpoint", + "scope": "Organization", + "organization": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_internal": True, + "budget_limit": 1.1, + "budget_last_reset_date": "2024-01-15T09:30:00Z", + "budget_reset_period": "Monthly", + "budget_total_spent": 1.1, + "budget_alert_threshold": 1.1, + } + expected_types: typing.Any = { + "provider": None, + "api_key": None, + "deployment_name": None, + "endpoint": None, + "scope": None, + "organization": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "is_internal": None, + "budget_limit": None, + "budget_last_reset_date": "datetime", + "budget_reset_period": None, + "budget_total_spent": None, + "budget_alert_threshold": None, + } + response = client.model_providers.create(provider="OpenAI") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.model_providers.create(provider="OpenAI") + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "provider": "OpenAI", + "api_key": "api_key", + "deployment_name": "deployment_name", + "endpoint": "endpoint", + "scope": "Organization", + "organization": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_internal": True, + "budget_limit": 1.1, + "budget_last_reset_date": "2024-01-15T09:30:00Z", + 
"budget_reset_period": "Monthly", + "budget_total_spent": 1.1, + "budget_alert_threshold": 1.1, + } + expected_types: typing.Any = { + "provider": None, + "api_key": None, + "deployment_name": None, + "endpoint": None, + "scope": None, + "organization": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "is_internal": None, + "budget_limit": None, + "budget_last_reset_date": "datetime", + "budget_reset_period": None, + "budget_total_spent": None, + "budget_alert_threshold": None, + } + response = client.model_providers.get(pk=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.model_providers.get(pk=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.model_providers.delete(pk=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.model_providers.delete(pk=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "provider": "OpenAI", + "api_key": "api_key", + "deployment_name": "deployment_name", + "endpoint": "endpoint", + "scope": "Organization", + "organization": 1, + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "is_internal": True, + "budget_limit": 1.1, + "budget_last_reset_date": "2024-01-15T09:30:00Z", + "budget_reset_period": "Monthly", + "budget_total_spent": 1.1, + "budget_alert_threshold": 1.1, + } + expected_types: typing.Any = { + "provider": None, + "api_key": None, + "deployment_name": None, + "endpoint": None, + "scope": None, + "organization": "integer", + "created_by": "integer", + "created_at": "datetime", + "updated_at": 
"datetime", + "is_internal": None, + "budget_limit": None, + "budget_last_reset_date": "datetime", + "budget_reset_period": None, + "budget_total_spent": None, + "budget_alert_threshold": None, + } + response = client.model_providers.update(pk=1, provider="OpenAI") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.model_providers.update(pk=1, provider="OpenAI") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_predictions.py b/tests/test_predictions.py new file mode 100644 index 000000000..f8559d6b3 --- /dev/null +++ b/tests/test_predictions.py @@ -0,0 +1,404 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "model_version": "yolo-v8", + "created_ago": "created_ago", + "score": 0.95, + "cluster": 1, + "neighbors": {"key": "value"}, + "mislabeling": 1.1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "model": 1, + "model_run": 1, + "task": 1, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "model_version": None, + "created_ago": None, + "score": None, + 
"cluster": "integer", + "neighbors": ("dict", {0: (None, None)}), + "mislabeling": None, + "created_at": "datetime", + "updated_at": "datetime", + "model": "integer", + "model_run": "integer", + "task": "integer", + "project": "integer", + } + }, + ) + response = client.predictions.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.predictions.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "model_version": "yolo-v8", + "created_ago": "created_ago", + "score": 0.95, + "cluster": 1, + "neighbors": {"key": "value"}, + "mislabeling": 1.1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "model": 1, + "model_run": 1, + "task": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "model_version": None, + "created_ago": None, + "score": None, + "cluster": "integer", + "neighbors": ("dict", {0: (None, None)}), + "mislabeling": None, + "created_at": "datetime", + "updated_at": "datetime", + "model": "integer", + "model_run": "integer", + "task": "integer", + "project": "integer", + } + response = client.predictions.create( + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + 
"value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.predictions.create( + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "model_version": "yolo-v8", + "created_ago": "created_ago", + "score": 0.95, + "cluster": 1, + "neighbors": {"key": "value"}, + "mislabeling": 1.1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "model": 1, + "model_run": 1, + "task": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "model_version": None, + "created_ago": None, + "score": None, + "cluster": "integer", + "neighbors": ("dict", {0: (None, None)}), + "mislabeling": None, + "created_at": "datetime", + "updated_at": "datetime", + "model": "integer", + "model_run": "integer", + "task": 
"integer", + "project": "integer", + } + response = client.predictions.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.predictions.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.predictions.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.predictions.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "result": [ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + "model_version": "yolo-v8", + "created_ago": "created_ago", + "score": 0.95, + "cluster": 1, + "neighbors": {"key": "value"}, + "mislabeling": 1.1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "model": 1, + "model_run": 1, + "task": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "result": ( + "list", + { + 0: ( + "dict", + { + 0: (None, None), + 1: (None, None), + 2: (None, None), + 3: (None, None), + 4: (None, None), + 5: (None, None), + 6: (None, None), + }, + ) + }, + ), + "model_version": None, + "created_ago": None, + "score": None, + "cluster": "integer", + "neighbors": ("dict", {0: (None, None)}), + "mislabeling": None, + "created_at": "datetime", + "updated_at": "datetime", + "model": "integer", + "model_run": "integer", + "task": "integer", + "project": "integer", + } + response = 
client.predictions.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.predictions.update( + id=1, + result=[ + { + "original_width": 1920, + "original_height": 1080, + "image_rotation": 0, + "from_name": "bboxes", + "to_name": "image", + "type": "rectanglelabels", + "value": { + "x": 20, + "y": 30, + "width": 50, + "height": 60, + "rotation": 0, + "values": {"rectanglelabels": ["Person"]}, + }, + } + ], + score=0.95, + model_version="yolo-v8", + ) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_projects.py b/tests/test_projects.py new file mode 100644 index 000000000..61fb9793b --- /dev/null +++ b/tests/test_projects.py @@ -0,0 +1,296 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "My project", + "description": "My first project", + "label_config": "[...]", + "expert_instruction": "Label all cats", + "show_instruction": True, + "show_skip_button": True, + "enable_empty_annotation": True, + "show_annotation_history": True, + "reveal_preannotations_interactively": True, + "show_collab_predictions": True, + "maximum_annotations": 1, + "color": "color", + "control_weights": { + "my_bbox": { + "type": "RectangleLabels", + "labels": {"Car": 1, "Airplaine": 0.5}, + "overall": 0.33, + } + }, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "label_config": None, + "expert_instruction": None, + "show_instruction": None, + "show_skip_button": None, + "enable_empty_annotation": None, + "show_annotation_history": None, + "reveal_preannotations_interactively": None, + "show_collab_predictions": None, + "maximum_annotations": "integer", + "color": None, + "control_weights": ("dict", {0: (None, None)}), + } + response = client.projects.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "My project", + "description": "My first project", + "label_config": "[...]", + "expert_instruction": "Label all cats", + "show_instruction": True, + "show_skip_button": True, + "enable_empty_annotation": True, + "show_annotation_history": True, + "organization": 1, + "prompts": [ + { + "title": "title", + "description": "description", + 
"created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": ["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + ], + "color": "#FF0000", + "maximum_annotations": 1, + "annotation_limit_count": 10, + "annotation_limit_percent": 50, + "is_published": True, + "model_version": "1.0.0", + "is_draft": False, + "created_by": { + "id": 1, + "first_name": "Jo", + "last_name": "Doe", + "email": "manager@humansignal.com", + "avatar": "avatar", + }, + "created_at": "2023-08-24T14:15:22Z", + "min_annotations_to_start_training": 0, + "start_training_on_annotation_update": True, + "show_collab_predictions": True, + "num_tasks_with_annotations": 10, + "task_number": 100, + "useful_annotation_number": 10, + "ground_truth_number": 5, + "skipped_annotations_number": 0, + "total_annotations_number": 10, + "total_predictions_number": 0, + "sampling": "Sequential sampling", + "show_ground_truth_first": True, + "show_overlap_first": True, + "overlap_cohort_percentage": 100, + "task_data_login": "user", + "task_data_password": "secret", + "control_weights": {"key": "value"}, + "parsed_label_config": {"key": "value"}, + "evaluate_predictions_automatically": False, + "config_has_control_tags": True, + "skip_queue": "REQUEUE_FOR_ME", + "reveal_preannotations_interactively": True, + "pinned_at": "2023-08-24T14:15:22Z", + "finished_task_number": 10, + "queue_total": 10, + "queue_done": 100, + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "label_config": None, + "expert_instruction": None, + "show_instruction": None, + "show_skip_button": None, + "enable_empty_annotation": None, + "show_annotation_history": None, + "organization": "integer", + "prompts": ( + "list", + { + 0: { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + 
"organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + }, + ), + "color": None, + "maximum_annotations": "integer", + "annotation_limit_count": "integer", + "annotation_limit_percent": None, + "is_published": None, + "model_version": None, + "is_draft": None, + "created_by": { + "id": "integer", + "first_name": None, + "last_name": None, + "email": None, + "avatar": None, + }, + "created_at": "datetime", + "min_annotations_to_start_training": "integer", + "start_training_on_annotation_update": None, + "show_collab_predictions": None, + "num_tasks_with_annotations": "integer", + "task_number": "integer", + "useful_annotation_number": "integer", + "ground_truth_number": "integer", + "skipped_annotations_number": "integer", + "total_annotations_number": "integer", + "total_predictions_number": "integer", + "sampling": None, + "show_ground_truth_first": None, + "show_overlap_first": None, + "overlap_cohort_percentage": "integer", + "task_data_login": None, + "task_data_password": None, + "control_weights": ("dict", {0: (None, None)}), + "parsed_label_config": ("dict", {0: (None, None)}), + "evaluate_predictions_automatically": None, + "config_has_control_tags": None, + "skip_queue": None, + "reveal_preannotations_interactively": None, + "pinned_at": "datetime", + "finished_task_number": "integer", + "queue_total": "integer", + "queue_done": "integer", + } + response = client.projects.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.projects.delete(id=1) # type: ignore[func-returns-value] 
+ is None + ) + + assert ( + await async_client.projects.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "My project", + "description": "My first project", + "label_config": "[...]", + "expert_instruction": "Label all cats", + "show_instruction": True, + "show_skip_button": True, + "enable_empty_annotation": True, + "show_annotation_history": True, + "reveal_preannotations_interactively": True, + "show_collab_predictions": True, + "maximum_annotations": 1, + "annotation_limit_count": 1, + "annotation_limit_percent": 1.1, + "color": "color", + "control_weights": { + "my_bbox": { + "type": "RectangleLabels", + "labels": {"Car": 1, "Airplaine": 0.5}, + "overall": 0.33, + } + }, + } + expected_types: typing.Any = { + "title": None, + "description": None, + "label_config": None, + "expert_instruction": None, + "show_instruction": None, + "show_skip_button": None, + "enable_empty_annotation": None, + "show_annotation_history": None, + "reveal_preannotations_interactively": None, + "show_collab_predictions": None, + "maximum_annotations": "integer", + "annotation_limit_count": "integer", + "annotation_limit_percent": None, + "color": None, + "control_weights": ("dict", {0: (None, None)}), + } + response = client.projects.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_import_tasks(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "task_count": 1, + "annotation_count": 1, + "predictions_count": 1, + "duration": 1.1, + "file_upload_ids": [1], + "could_be_tasks_list": True, + "found_formats": ["found_formats"], + "data_columns": ["data_columns"], + } + expected_types: typing.Any = { + "task_count": 
"integer", + "annotation_count": "integer", + "predictions_count": "integer", + "duration": None, + "file_upload_ids": ("list", {0: "integer"}), + "could_be_tasks_list": None, + "found_formats": ("list", {0: None}), + "data_columns": ("list", {0: None}), + } + response = client.projects.import_tasks(id=1, request=[{"key": "value"}]) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.import_tasks(id=1, request=[{"key": "value"}]) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate_config(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"label_config": "label_config"} + expected_types: typing.Any = {"label_config": None} + response = client.projects.validate_config(id=1, label_config="label_config") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.projects.validate_config(id=1, label_config="label_config") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_prompts.py b/tests/test_prompts.py new file mode 100644 index 000000000..7b13a67ff --- /dev/null +++ b/tests/test_prompts.py @@ -0,0 +1,186 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "title": "title", + "description": "description", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": ["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + }, + ) + response = client.prompts.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": ["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + expected_types: typing.Any = { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + 
response = client.prompts.create(title="title", input_fields=["input_fields"], output_classes=["output_classes"]) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.create( + title="title", input_fields=["input_fields"], output_classes=["output_classes"] + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "created_by": 1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": ["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + expected_types: typing.Any = { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + response = client.prompts.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.prompts.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.prompts.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "title": "title", + "description": "description", + "created_by": 1, + "created_at": 
"2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "organization": 1, + "input_fields": ["input_fields"], + "output_classes": ["output_classes"], + "associated_projects": [1], + "skill_name": "skill_name", + } + expected_types: typing.Any = { + "title": None, + "description": None, + "created_by": "integer", + "created_at": "datetime", + "updated_at": "datetime", + "organization": "integer", + "input_fields": ("list", {0: None}), + "output_classes": ("list", {0: None}), + "associated_projects": ("list", {0: "integer"}), + "skill_name": None, + } + response = client.prompts.update( + id=1, + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.update( + id=1, + title="title", + input_fields=["input_fields"], + output_classes=["output_classes"], + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_batch_predictions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"detail": "detail"} + expected_types: typing.Any = {"detail": None} + response = client.prompts.batch_predictions() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.batch_predictions() + validate_response(async_response, expected_response, expected_types) + + +async def test_batch_failed_predictions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"detail": "detail"} + expected_types: typing.Any = {"detail": None} + response = client.prompts.batch_failed_predictions() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.prompts.batch_failed_predictions() + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_tasks.py b/tests/test_tasks.py new file mode 100644 
index 000000000..3b16590c9 --- /dev/null +++ b/tests/test_tasks.py @@ -0,0 +1,353 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_create_many_status(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "preannotated_from_fields": {"key": "value"}, + "commit_to_project": True, + "return_task_ids": True, + "status": "created", + "url": "url", + "traceback": "traceback", + "error": "error", + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "finished_at": "2024-01-15T09:30:00Z", + "task_count": 1, + "annotation_count": 1, + "prediction_count": 1, + "duration": 1, + "file_upload_ids": {"key": "value"}, + "could_be_tasks_list": True, + "found_formats": {"key": "value"}, + "data_columns": {"key": "value"}, + "tasks": {"key": "value"}, + "task_ids": {"key": "value"}, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "preannotated_from_fields": ("dict", {0: (None, None)}), + "commit_to_project": None, + "return_task_ids": None, + "status": None, + "url": None, + "traceback": None, + "error": None, + "created_at": "datetime", + "updated_at": "datetime", + "finished_at": "datetime", + "task_count": "integer", + "annotation_count": "integer", + "prediction_count": "integer", + "duration": "integer", + "file_upload_ids": ("dict", {0: (None, None)}), + "could_be_tasks_list": None, + "found_formats": ("dict", {0: (None, None)}), + "data_columns": ("dict", {0: (None, None)}), + "tasks": ("dict", {0: (None, None)}), + "task_ids": ("dict", {0: (None, None)}), + "project": "integer", + } + response = client.tasks.create_many_status(id=1, import_pk="import_pk") + validate_response(response, expected_response, expected_types) + + async_response = await 
async_client.tasks.create_many_status(id=1, import_pk="import_pk") + validate_response(async_response, expected_response, expected_types) + + +async def test_delete_all_tasks(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "data": {"image": "https://example.com/image.jpg", "text": "Hello, AI!"}, + "meta": {"key": "value"}, + "created_at": "2024-06-18T23:45:46Z", + "updated_at": "2024-06-18T23:45:46Z", + "is_labeled": False, + "overlap": 1, + "inner_id": 1, + "total_annotations": 0, + "cancelled_annotations": 0, + "total_predictions": 0, + "comment_count": 0, + "unresolved_comment_count": 0, + "last_comment_updated_at": "2024-01-15T09:30:00Z", + "project": 1, + "updated_by": [{"user_id": 1}], + "file_upload": "42d46c4c-my-pic.jpeg", + "comment_authors": [1], + } + expected_types: typing.Any = { + "id": "integer", + "data": ("dict", {0: (None, None), 1: (None, None)}), + "meta": ("dict", {0: (None, None)}), + "created_at": "datetime", + "updated_at": "datetime", + "is_labeled": None, + "overlap": "integer", + "inner_id": "integer", + "total_annotations": "integer", + "cancelled_annotations": "integer", + "total_predictions": "integer", + "comment_count": "integer", + "unresolved_comment_count": "integer", + "last_comment_updated_at": "datetime", + "project": "integer", + "updated_by": ("list", {0: ("dict", {0: (None, None)})}), + "file_upload": None, + "comment_authors": ("list", {0: "integer"}), + } + response = client.tasks.create( + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + 
project=1, + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.tasks.create( + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + project=1, + ) + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 13, + "predictions": [ + { + "result": [{"key": "value"}], + "score": 1.1, + "model_version": "model_version", + "model": {"key": "value"}, + "model_run": {"key": "value"}, + "task": 1, + "project": 1.1, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + } + ], + "annotations": [ + { + "id": 1, + "result": [{"key": "value"}], + "created_username": "created_username", + "created_ago": "created_ago", + "completed_by": {"key": "value"}, + "unique_id": "unique_id", + "was_cancelled": True, + "ground_truth": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "draft_created_at": "2024-01-15T09:30:00Z", + "lead_time": 1.1, + "import_id": 1, + "last_action": "prediction", + "task": 1, + "project": 1, + "updated_by": 1, + "parent_prediction": 1, + "parent_annotation": 1, + "last_created_by": 1, + } + ], + "drafts": [ + { + "result": [{"key": "value"}], + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + } + ], + "annotators": [1], + "inner_id": 2, + "cancelled_annotations": 0, + "total_annotations": 0, + "total_predictions": 0, + "annotations_results": "", + "predictions_results": "", + "file_upload": "6b25fc23-some_3.mp4", + "annotations_ids": "", + "predictions_model_versions": "", + "draft_exists": False, + "updated_by": [{"key": "value"}], + "data": {"image": "/data/upload/1/6b25fc23-some_3.mp4"}, + "meta": {"key": "value"}, + "created_at": "2024-06-18T23:45:46Z", + "updated_at": "2024-06-18T23:45:46Z", + "is_labeled": False, + "overlap": 1, + 
"comment_count": 0, + "unresolved_comment_count": 0, + "project": 1, + "comment_authors": [1], + } + expected_types: typing.Any = { + "id": "integer", + "predictions": ( + "list", + { + 0: { + "result": ("list", {0: ("dict", {0: (None, None)})}), + "score": None, + "model_version": None, + "model": ("dict", {0: (None, None)}), + "model_run": ("dict", {0: (None, None)}), + "task": "integer", + "project": None, + "created_at": "datetime", + "updated_at": "datetime", + } + }, + ), + "annotations": ( + "list", + { + 0: { + "id": "integer", + "result": ("list", {0: ("dict", {0: (None, None)})}), + "created_username": None, + "created_ago": None, + "completed_by": ("dict", {0: (None, None)}), + "unique_id": None, + "was_cancelled": None, + "ground_truth": None, + "created_at": "datetime", + "updated_at": "datetime", + "draft_created_at": "datetime", + "lead_time": None, + "import_id": "integer", + "last_action": None, + "task": "integer", + "project": "integer", + "updated_by": "integer", + "parent_prediction": "integer", + "parent_annotation": "integer", + "last_created_by": "integer", + } + }, + ), + "drafts": ( + "list", + { + 0: { + "result": ("list", {0: ("dict", {0: (None, None)})}), + "created_at": "datetime", + "updated_at": "datetime", + } + }, + ), + "annotators": ("list", {0: "integer"}), + "inner_id": "integer", + "cancelled_annotations": "integer", + "total_annotations": "integer", + "total_predictions": "integer", + "annotations_results": None, + "predictions_results": None, + "file_upload": None, + "annotations_ids": None, + "predictions_model_versions": None, + "draft_exists": None, + "updated_by": ("list", {0: ("dict", {0: (None, None)})}), + "data": ("dict", {0: (None, None)}), + "meta": ("dict", {0: (None, None)}), + "created_at": "datetime", + "updated_at": "datetime", + "is_labeled": None, + "overlap": "integer", + "comment_count": "integer", + "unresolved_comment_count": "integer", + "project": "integer", + "comment_authors": ("list", {0: 
"integer"}), + } + response = client.tasks.get(id="id") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.tasks.get(id="id") + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.tasks.delete(id="id") # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.tasks.delete(id="id") # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "data": {"image": "https://example.com/image.jpg", "text": "Hello, AI!"}, + "meta": {"key": "value"}, + "created_at": "2024-06-18T23:45:46Z", + "updated_at": "2024-06-18T23:45:46Z", + "is_labeled": False, + "overlap": 1, + "inner_id": 1, + "total_annotations": 0, + "cancelled_annotations": 0, + "total_predictions": 0, + "comment_count": 0, + "unresolved_comment_count": 0, + "last_comment_updated_at": "2024-01-15T09:30:00Z", + "project": 1, + "updated_by": [{"user_id": 1}], + "file_upload": "42d46c4c-my-pic.jpeg", + "comment_authors": [1], + } + expected_types: typing.Any = { + "id": "integer", + "data": ("dict", {0: (None, None), 1: (None, None)}), + "meta": ("dict", {0: (None, None)}), + "created_at": "datetime", + "updated_at": "datetime", + "is_labeled": None, + "overlap": "integer", + "inner_id": "integer", + "total_annotations": "integer", + "cancelled_annotations": "integer", + "total_predictions": "integer", + "comment_count": "integer", + "unresolved_comment_count": "integer", + "last_comment_updated_at": "datetime", + "project": "integer", + "updated_by": ("list", {0: ("dict", {0: (None, None)})}), + "file_upload": None, + "comment_authors": ("list", {0: "integer"}), + } + response = 
client.tasks.update( + id="id", + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + project=1, + ) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.tasks.update( + id="id", + data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, + project=1, + ) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_tokens.py b/tests/test_tokens.py new file mode 100644 index 000000000..706651d59 --- /dev/null +++ b/tests/test_tokens.py @@ -0,0 +1,66 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_blacklist(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.tokens.blacklist(refresh="refresh") # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.tokens.blacklist(refresh="refresh") # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [{"token": "token", "created_at": "created_at", "expires_at": "expires_at"}] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + {0: {"token": None, "created_at": None, "expires_at": None}}, + ) + response = client.tokens.get() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.tokens.get() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "token": "token", + "created_at": "created_at", + "expires_at": "expires_at", + } + expected_types: typing.Any 
= {"token": None, "created_at": None, "expires_at": None} + response = client.tokens.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.tokens.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_refresh(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"access": "access"} + expected_types: typing.Any = {"access": None} + response = client.tokens.refresh(refresh="refresh") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.tokens.refresh(refresh="refresh") + validate_response(async_response, expected_response, expected_types) + + +async def test_rotate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"refresh": "refresh"} + expected_types: typing.Any = {"refresh": None} + response = client.tokens.rotate(refresh="refresh") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.tokens.rotate(refresh="refresh") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_users.py b/tests/test_users.py new file mode 100644 index 000000000..e6e5582b0 --- /dev/null +++ b/tests/test_users.py @@ -0,0 +1,226 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_reset_token(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"token": "token"} + expected_types: typing.Any = {"token": None} + response = client.users.reset_token() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.users.reset_token() + validate_response(async_response, expected_response, expected_types) + + +async def test_get_token(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"detail": "detail"} + expected_types: typing.Any = {"detail": None} + response = client.users.get_token() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.users.get_token() + validate_response(async_response, expected_response, expected_types) + + +async def test_whoami(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "username": "username", + "email": "email", + "last_activity": "2024-01-15T09:30:00Z", + "avatar": "avatar", + "initials": "initials", + "phone": "phone", + "active_organization": 1, + "allow_newsletters": True, + "date_joined": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "first_name": None, + "last_name": None, + "username": None, + "email": None, + "last_activity": "datetime", + "avatar": None, + "initials": None, + "phone": None, + "active_organization": "integer", + "allow_newsletters": None, + "date_joined": "datetime", + } + response = client.users.whoami() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.users.whoami() + validate_response(async_response, expected_response, expected_types) 
+ + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "username": "username", + "email": "email", + "last_activity": "2024-01-15T09:30:00Z", + "avatar": "avatar", + "initials": "initials", + "phone": "phone", + "active_organization": 1, + "allow_newsletters": True, + "date_joined": "2024-01-15T09:30:00Z", + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "first_name": None, + "last_name": None, + "username": None, + "email": None, + "last_activity": "datetime", + "avatar": None, + "initials": None, + "phone": None, + "active_organization": "integer", + "allow_newsletters": None, + "date_joined": "datetime", + } + }, + ) + response = client.users.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.users.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "username": "username", + "email": "email", + "last_activity": "2024-01-15T09:30:00Z", + "avatar": "avatar", + "initials": "initials", + "phone": "phone", + "active_organization": 1, + "allow_newsletters": True, + "date_joined": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "first_name": None, + "last_name": None, + "username": None, + "email": None, + "last_activity": "datetime", + "avatar": None, + "initials": None, + "phone": None, + "active_organization": "integer", + "allow_newsletters": None, + "date_joined": "datetime", + } + response = client.users.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.users.create() + 
validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "username": "username", + "email": "email", + "last_activity": "2024-01-15T09:30:00Z", + "avatar": "avatar", + "initials": "initials", + "phone": "phone", + "active_organization": 1, + "allow_newsletters": True, + "date_joined": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "first_name": None, + "last_name": None, + "username": None, + "email": None, + "last_activity": "datetime", + "avatar": None, + "initials": None, + "phone": None, + "active_organization": "integer", + "allow_newsletters": None, + "date_joined": "datetime", + } + response = client.users.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.users.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.users.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.users.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "first_name": "first_name", + "last_name": "last_name", + "username": "username", + "email": "email", + "last_activity": "2024-01-15T09:30:00Z", + "avatar": "avatar", + "initials": "initials", + "phone": "phone", + "active_organization": 1, + "allow_newsletters": True, + "date_joined": "2024-01-15T09:30:00Z", + } + expected_types: typing.Any = { + "id": "integer", + "first_name": None, + "last_name": None, + "username": 
None, + "email": None, + "last_activity": "datetime", + "avatar": None, + "initials": None, + "phone": None, + "active_organization": "integer", + "allow_newsletters": None, + "date_joined": "datetime", + } + response = client.users.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.users.update(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_versions.py b/tests/test_versions.py new file mode 100644 index 000000000..16e3f4d37 --- /dev/null +++ b/tests/test_versions.py @@ -0,0 +1,36 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "release": "release", + "label-studio-os-package": {"key": "value"}, + "label-studio-os-backend": {"key": "value"}, + "label-studio-frontend": {"key": "value"}, + "dm2": {"key": "value"}, + "label-studio-converter": {"key": "value"}, + "edition": "Community", + "lsf": {"key": "value"}, + "backend": {"key": "value"}, + } + expected_types: typing.Any = { + "release": None, + "label-studio-os-package": ("dict", {0: (None, None)}), + "label-studio-os-backend": ("dict", {0: (None, None)}), + "label-studio-frontend": ("dict", {0: (None, None)}), + "dm2": ("dict", {0: (None, None)}), + "label-studio-converter": ("dict", {0: (None, None)}), + "edition": None, + "lsf": ("dict", {0: (None, None)}), + "backend": ("dict", {0: (None, None)}), + } + response = client.versions.get() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.versions.get() + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_views.py b/tests/test_views.py new file mode 100644 index 
000000000..b6dc97106 --- /dev/null +++ b/tests/test_views.py @@ -0,0 +1,178 @@ +# This file was auto-generated by Fern from our API Definition. + +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "filter_group": { + "id": 1, + "filters": [{"column": "column", "type": "type", "operator": "operator"}], + "conjunction": "conjunction", + }, + "data": {"key": "value"}, + "ordering": {"key": "value"}, + "selected_items": {"key": "value"}, + "user": 1, + "project": 1, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "filter_group": { + "id": "integer", + "filters": ( + "list", + {0: {"column": None, "type": None, "operator": None}}, + ), + "conjunction": None, + }, + "data": ("dict", {0: (None, None)}), + "ordering": ("dict", {0: (None, None)}), + "selected_items": ("dict", {0: (None, None)}), + "user": "integer", + "project": "integer", + } + }, + ) + response = client.views.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.views.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "filter_group": { + "id": 1, + "filters": [{"column": "column", "type": "type", "operator": "operator"}], + "conjunction": "conjunction", + }, + "data": {"key": "value"}, + "ordering": {"key": "value"}, + "selected_items": {"key": "value"}, + "user": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "filter_group": { + "id": "integer", + "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), + "conjunction": None, + }, + "data": 
("dict", {0: (None, None)}), + "ordering": ("dict", {0: (None, None)}), + "selected_items": ("dict", {0: (None, None)}), + "user": "integer", + "project": "integer", + } + response = client.views.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.views.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_delete_all(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.views.delete_all(project=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.views.delete_all(project=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "filter_group": { + "id": 1, + "filters": [{"column": "column", "type": "type", "operator": "operator"}], + "conjunction": "conjunction", + }, + "data": {"key": "value"}, + "ordering": {"key": "value"}, + "selected_items": {"key": "value"}, + "user": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "filter_group": { + "id": "integer", + "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), + "conjunction": None, + }, + "data": ("dict", {0: (None, None)}), + "ordering": ("dict", {0: (None, None)}), + "selected_items": ("dict", {0: (None, None)}), + "user": "integer", + "project": "integer", + } + response = client.views.get(id="id") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.views.get(id="id") + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + 
assert ( + client.views.delete(id="id") # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.views.delete(id="id") # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "filter_group": { + "id": 1, + "filters": [{"column": "column", "type": "type", "operator": "operator"}], + "conjunction": "conjunction", + }, + "data": {"key": "value"}, + "ordering": {"key": "value"}, + "selected_items": {"key": "value"}, + "user": 1, + "project": 1, + } + expected_types: typing.Any = { + "id": "integer", + "filter_group": { + "id": "integer", + "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), + "conjunction": None, + }, + "data": ("dict", {0: (None, None)}), + "ordering": ("dict", {0: (None, None)}), + "selected_items": ("dict", {0: (None, None)}), + "user": "integer", + "project": "integer", + } + response = client.views.update(id="id") + validate_response(response, expected_response, expected_types) + + async_response = await async_client.views.update(id="id") + validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_workspaces.py b/tests/test_workspaces.py new file mode 100644 index 000000000..dc43be593 --- /dev/null +++ b/tests/test_workspaces.py @@ -0,0 +1,154 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from .utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + }, + ) + response = client.workspaces.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + response = client.workspaces.create() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.create() + validate_response(async_response, expected_response, expected_types) + + +async def test_get(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + response = client.workspaces.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.workspaces.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.workspaces.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "id": 1, + "title": "title", + "description": "description", + "is_public": True, + "is_personal": True, + "is_archived": True, + "created_at": "2024-01-15T09:30:00Z", + "updated_at": "2024-01-15T09:30:00Z", + "created_by": 1, + "color": "color", + } + expected_types: typing.Any = { + "id": "integer", + "title": None, + "description": None, + "is_public": None, + "is_personal": None, + "is_archived": None, + "created_at": "datetime", + "updated_at": "datetime", + "created_by": "integer", + "color": None, + } + response = client.workspaces.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = 
await async_client.workspaces.update(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/utilities.py b/tests/utilities.py new file mode 100644 index 000000000..3d228806a --- /dev/null +++ b/tests/utilities.py @@ -0,0 +1,162 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +import uuid + +from dateutil import parser + +import pydantic + + +def cast_field(json_expectation: typing.Any, type_expectation: typing.Any) -> typing.Any: + # Cast these specific types which come through as string and expect our + # models to cast to the correct type. + if type_expectation == "uuid": + return uuid.UUID(json_expectation) + elif type_expectation == "date": + return parser.parse(json_expectation).date() + elif type_expectation == "datetime": + return parser.parse(json_expectation) + elif type_expectation == "set": + return set(json_expectation) + elif type_expectation == "integer": + # Necessary as we allow numeric keys, but JSON makes them strings + return int(json_expectation) + + return json_expectation + + +def validate_field(response: typing.Any, json_expectation: typing.Any, type_expectation: typing.Any) -> None: + # Allow for an escape hatch if the object cannot be validated + if type_expectation == "no_validate": + return + + is_container_of_complex_type = False + # Parse types in containers, note that dicts are handled within `validate_response` + if isinstance(json_expectation, list): + if isinstance(type_expectation, tuple): + container_expectation = type_expectation[0] + contents_expectation = type_expectation[1] + + cast_json_expectation = [] + for idx, ex in enumerate(json_expectation): + if isinstance(contents_expectation, dict): + entry_expectation = contents_expectation.get(idx) + if isinstance(entry_expectation, dict): + is_container_of_complex_type = True + validate_response( + response=response[idx], + json_expectation=ex, + type_expectations=entry_expectation, + ) + else: + 
cast_json_expectation.append(cast_field(ex, entry_expectation)) + else: + cast_json_expectation.append(ex) + json_expectation = cast_json_expectation + + # Note that we explicitly do not allow for sets of pydantic models as they are not hashable, so + # if any of the values of the set have a type_expectation of a dict, we're assuming it's a pydantic + # model and keeping it a list. + if container_expectation != "set" or not any( + map( + lambda value: isinstance(value, dict), + list(contents_expectation.values()), + ) + ): + json_expectation = cast_field(json_expectation, container_expectation) + elif isinstance(type_expectation, tuple): + container_expectation = type_expectation[0] + contents_expectation = type_expectation[1] + if isinstance(contents_expectation, dict): + json_expectation = { + cast_field( + key, + contents_expectation.get(idx)[0] # type: ignore + if contents_expectation.get(idx) is not None + else None, + ): cast_field( + value, + contents_expectation.get(idx)[1] # type: ignore + if contents_expectation.get(idx) is not None + else None, + ) + for idx, (key, value) in enumerate(json_expectation.items()) + } + else: + json_expectation = cast_field(json_expectation, container_expectation) + elif type_expectation is not None: + json_expectation = cast_field(json_expectation, type_expectation) + + # When dealing with containers of models, etc. 
we're validating them implicitly, so no need to check the resultant list + if not is_container_of_complex_type: + assert ( + json_expectation == response + ), "Primitives found, expected: {0} (type: {1}), Actual: {2} (type: {3})".format( + json_expectation, type(json_expectation), response, type(response) + ) + + +# Arg type_expectations is a deeply nested structure that matches the response, but with the values replaced with the expected types +def validate_response(response: typing.Any, json_expectation: typing.Any, type_expectations: typing.Any) -> None: + # Allow for an escape hatch if the object cannot be validated + if type_expectations == "no_validate": + return + + if ( + not isinstance(response, list) + and not isinstance(response, dict) + and not issubclass(type(response), pydantic.BaseModel) + ): + validate_field( + response=response, + json_expectation=json_expectation, + type_expectation=type_expectations, + ) + return + + if isinstance(response, list): + assert len(response) == len(json_expectation), "Length mismatch, expected: {0}, Actual: {1}".format( + len(response), len(json_expectation) + ) + content_expectation = type_expectations + if isinstance(type_expectations, tuple): + content_expectation = type_expectations[1] + for idx, item in enumerate(response): + validate_response( + response=item, + json_expectation=json_expectation[idx], + type_expectations=content_expectation[idx], + ) + else: + response_json = response + if issubclass(type(response), pydantic.BaseModel): + response_json = response.dict(by_alias=True) + + for key, value in json_expectation.items(): + assert key in response_json, "Field {0} not found within the response object: {1}".format( + key, response_json + ) + + type_expectation = None + if type_expectations is not None and isinstance(type_expectations, dict): + type_expectation = type_expectations.get(key) + + # If your type_expectation is a tuple then you have a container field, process it as such + # Otherwise, we're just 
validating a single field that's a pydantic model. + if isinstance(value, dict) and not isinstance(type_expectation, tuple): + validate_response( + response=response_json[key], + json_expectation=value, + type_expectations=type_expectation, + ) + else: + validate_field( + response=response_json[key], + json_expectation=value, + type_expectation=type_expectation, + ) + + # Ensure there are no additional fields here either + del response_json[key] + assert len(response_json) == 0, "Additional fields found, expected None: {0}".format(response_json) diff --git a/tests/utils/assets/models/__init__.py b/tests/utils/assets/models/__init__.py index 2cf012635..3a1c852e7 100644 --- a/tests/utils/assets/models/__init__.py +++ b/tests/utils/assets/models/__init__.py @@ -5,7 +5,7 @@ from .circle import CircleParams from .object_with_defaults import ObjectWithDefaultsParams from .object_with_optional_field import ObjectWithOptionalFieldParams -from .shape import Shape_CircleParams, Shape_SquareParams, ShapeParams +from .shape import ShapeParams, Shape_CircleParams, Shape_SquareParams from .square import SquareParams from .undiscriminated_shape import UndiscriminatedShapeParams diff --git a/tests/utils/assets/models/circle.py b/tests/utils/assets/models/circle.py index c54c57b40..09b8e0647 100644 --- a/tests/utils/assets/models/circle.py +++ b/tests/utils/assets/models/circle.py @@ -3,7 +3,7 @@ # This file was auto-generated by Fern from our API Definition. import typing_extensions - +import typing_extensions from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/object_with_defaults.py b/tests/utils/assets/models/object_with_defaults.py index a977b1d2a..ef14f7b2c 100644 --- a/tests/utils/assets/models/object_with_defaults.py +++ b/tests/utils/assets/models/object_with_defaults.py @@ -3,6 +3,7 @@ # This file was auto-generated by Fern from our API Definition. 
import typing_extensions +import typing_extensions class ObjectWithDefaultsParams(typing_extensions.TypedDict): diff --git a/tests/utils/assets/models/object_with_optional_field.py b/tests/utils/assets/models/object_with_optional_field.py index 802e01ff2..ee4f9dbfe 100644 --- a/tests/utils/assets/models/object_with_optional_field.py +++ b/tests/utils/assets/models/object_with_optional_field.py @@ -2,17 +2,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt +import typing_extensions import typing -import uuid - import typing_extensions +from label_studio_sdk.core.serialization import FieldMetadata +import datetime as dt +import uuid from .color import Color from .shape import ShapeParams from .undiscriminated_shape import UndiscriminatedShapeParams -from label_studio_sdk.core.serialization import FieldMetadata - class ObjectWithOptionalFieldParams(typing_extensions.TypedDict): literal: typing.Literal["lit_one"] diff --git a/tests/utils/assets/models/shape.py b/tests/utils/assets/models/shape.py index e9d51e32c..820dec7a6 100644 --- a/tests/utils/assets/models/shape.py +++ b/tests/utils/assets/models/shape.py @@ -3,11 +3,9 @@ # This file was auto-generated by Fern from our API Definition. from __future__ import annotations - -import typing - import typing_extensions - +import typing_extensions +import typing from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/square.py b/tests/utils/assets/models/square.py index d9d65afca..b70897be3 100644 --- a/tests/utils/assets/models/square.py +++ b/tests/utils/assets/models/square.py @@ -3,7 +3,7 @@ # This file was auto-generated by Fern from our API Definition. 
import typing_extensions - +import typing_extensions from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/undiscriminated_shape.py b/tests/utils/assets/models/undiscriminated_shape.py index 99f12b300..68876a23c 100644 --- a/tests/utils/assets/models/undiscriminated_shape.py +++ b/tests/utils/assets/models/undiscriminated_shape.py @@ -3,7 +3,6 @@ # This file was auto-generated by Fern from our API Definition. import typing - from .circle import CircleParams from .square import SquareParams diff --git a/tests/utils/test_http_client.py b/tests/utils/test_http_client.py index 1a2d02245..ba375d043 100644 --- a/tests/utils/test_http_client.py +++ b/tests/utils/test_http_client.py @@ -14,7 +14,10 @@ def test_get_json_request_body() -> None: assert data_body is None json_body_extras, data_body_extras = get_request_body( - json={"goodbye": "world"}, data=None, request_options=get_request_options(), omit=None + json={"goodbye": "world"}, + data=None, + request_options=get_request_options(), + omit=None, ) assert json_body_extras == {"goodbye": "world", "see you": "later"} @@ -27,7 +30,10 @@ def test_get_files_request_body() -> None: assert json_body is None json_body_extras, data_body_extras = get_request_body( - json=None, data={"goodbye": "world"}, request_options=get_request_options(), omit=None + json=None, + data={"goodbye": "world"}, + request_options=get_request_options(), + omit=None, ) assert data_body_extras == {"goodbye": "world", "see you": "later"} diff --git a/tests/utils/test_query_encoding.py b/tests/utils/test_query_encoding.py index 6276c4f4f..c995dff43 100644 --- a/tests/utils/test_query_encoding.py +++ b/tests/utils/test_query_encoding.py @@ -15,14 +15,26 @@ def test_query_encoding_deep_objects() -> None: def test_query_encoding_deep_object_arrays() -> None: - assert encode_query({"objects": [{"key": "hello", "value": "world"}, {"key": "foo", "value": "bar"}]}) == [ + assert encode_query( + { + "objects": [ 
+ {"key": "hello", "value": "world"}, + {"key": "foo", "value": "bar"}, + ] + } + ) == [ ("objects[key]", "hello"), ("objects[value]", "world"), ("objects[key]", "foo"), ("objects[value]", "bar"), ] assert encode_query( - {"users": [{"name": "string", "tags": ["string"]}, {"name": "string2", "tags": ["string2", "string3"]}]} + { + "users": [ + {"name": "string", "tags": ["string"]}, + {"name": "string2", "tags": ["string2", "string3"]}, + ] + } ) == [ ("users[name]", "string"), ("users[tags]", "string"), @@ -34,4 +46,4 @@ def test_query_encoding_deep_object_arrays() -> None: def test_encode_query_with_none() -> None: encoded = encode_query(None) - assert encoded is None + assert encoded == None diff --git a/tests/utils/test_serialization.py b/tests/utils/test_serialization.py index 2654074e2..1f7b0daf8 100644 --- a/tests/utils/test_serialization.py +++ b/tests/utils/test_serialization.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from typing import Any, List - -from .assets.models import ObjectWithOptionalFieldParams, ShapeParams +from typing import List, Any from label_studio_sdk.core.serialization import convert_and_respect_annotation_metadata +from .assets.models import ShapeParams, ObjectWithOptionalFieldParams + UNION_TEST: ShapeParams = {"radius_measurement": 1.0, "shape_type": "circle", "id": "1"} UNION_TEST_CONVERTED = {"shapeType": "circle", "radiusMeasurement": 1.0, "id": "1"} @@ -21,21 +21,51 @@ def test_convert_and_respect_annotation_metadata() -> None: converted = convert_and_respect_annotation_metadata( object_=data, annotation=ObjectWithOptionalFieldParams, direction="write" ) - assert converted == {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"} + assert converted == { + "string": "string", + "long": 12345, + "bool": True, + "literal": "lit_one", + "any": "any", + } def test_convert_and_respect_annotation_metadata_in_list() -> None: data: List[ObjectWithOptionalFieldParams] 
= [ - {"string": "string", "long_": 12345, "bool_": True, "literal": "lit_one", "any": "any"}, - {"string": "another string", "long_": 67890, "list_": [], "literal": "lit_one", "any": "any"}, + { + "string": "string", + "long_": 12345, + "bool_": True, + "literal": "lit_one", + "any": "any", + }, + { + "string": "another string", + "long_": 67890, + "list_": [], + "literal": "lit_one", + "any": "any", + }, ] converted = convert_and_respect_annotation_metadata( object_=data, annotation=List[ObjectWithOptionalFieldParams], direction="write" ) assert converted == [ - {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"}, - {"string": "another string", "long": 67890, "list": [], "literal": "lit_one", "any": "any"}, + { + "string": "string", + "long": 12345, + "bool": True, + "literal": "lit_one", + "any": "any", + }, + { + "string": "another string", + "long": 67890, + "list": [], + "literal": "lit_one", + "any": "any", + }, ] diff --git a/tests/workspaces/__init__.py b/tests/workspaces/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/tests/workspaces/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/tests/workspaces/test_members.py b/tests/workspaces/test_members.py new file mode 100644 index 000000000..be7fcd998 --- /dev/null +++ b/tests/workspaces/test_members.py @@ -0,0 +1,42 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [{"user": {"key": "value"}}] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + {0: {"user": ("dict", {0: (None, None)})}}, + ) + response = client.workspaces.members.list(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.members.list(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = {"user": 1} + expected_types: typing.Any = {"user": "integer"} + response = client.workspaces.members.create(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.workspaces.members.create(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] + is None + ) From f9965c8ca947c4f74068433df3d8a20c9e244302 Mon Sep 17 00:00:00 2001 From: fern-api <115122769+fern-api[bot]@users.noreply.github.com> Date: Thu, 10 Jul 2025 08:49:36 +0000 Subject: [PATCH 4/4] SDK regeneration --- .gitignore | 4 +- .mock/definition/__package__.yml | 37 +- .mock/definition/files.yml | 4 +- .mock/definition/projects.yml | 42 +- .mock/definition/projects/exports.yml | 2 +- .mock/definition/prompts.yml | 8 + .mock/fern.config.json | 2 +- 
.mock/openapi/openapi.yaml | 94 +- poetry.lock | 985 ++++++------- pyproject.toml | 33 +- reference.md | 38 +- requirements.txt | 13 +- src/label_studio_sdk/__init__.py | 6 + src/label_studio_sdk/actions/__init__.py | 2 + src/label_studio_sdk/actions/client.py | 148 +- src/label_studio_sdk/actions/raw_client.py | 223 +++ .../actions/types/__init__.py | 2 + .../types/actions_create_request_filters.py | 14 +- ...tions_create_request_filters_items_item.py | 18 +- .../actions_create_request_selected_items.py | 9 +- ..._create_request_selected_items_excluded.py | 8 +- ..._create_request_selected_items_included.py | 8 +- src/label_studio_sdk/annotations/__init__.py | 2 + src/label_studio_sdk/annotations/client.py | 392 ++---- .../annotations/raw_client.py | 794 +++++++++++ .../annotations/types/__init__.py | 2 + ...ions_create_bulk_request_selected_items.py | 8 +- .../annotations_create_bulk_response_item.py | 4 +- src/label_studio_sdk/base_client.py | 75 +- src/label_studio_sdk/comments/__init__.py | 2 + src/label_studio_sdk/comments/client.py | 279 +--- src/label_studio_sdk/comments/raw_client.py | 529 +++++++ src/label_studio_sdk/core/__init__.py | 5 + src/label_studio_sdk/core/api_error.py | 18 +- src/label_studio_sdk/core/file.py | 3 +- src/label_studio_sdk/core/force_multipart.py | 16 + src/label_studio_sdk/core/http_client.py | 96 +- src/label_studio_sdk/core/http_response.py | 55 + src/label_studio_sdk/core/jsonable_encoder.py | 1 - src/label_studio_sdk/core/pagination.py | 94 +- .../core/pydantic_utilities.py | 185 +-- src/label_studio_sdk/core/serialization.py | 10 +- src/label_studio_sdk/errors/__init__.py | 2 + .../errors/bad_request_error.py | 7 +- .../errors/internal_server_error.py | 6 +- .../errors/not_found_error.py | 7 +- .../errors/unauthorized_error.py | 7 +- .../export_storage/__init__.py | 2 + .../export_storage/azure/__init__.py | 2 + .../export_storage/azure/client.py | 433 ++---- .../export_storage/azure/raw_client.py | 881 ++++++++++++ 
.../export_storage/azure/types/__init__.py | 2 + .../azure/types/azure_create_response.py | 4 +- .../azure/types/azure_update_response.py | 4 +- src/label_studio_sdk/export_storage/client.py | 130 +- .../export_storage/gcs/__init__.py | 2 + .../export_storage/gcs/client.py | 433 ++---- .../export_storage/gcs/raw_client.py | 881 ++++++++++++ .../export_storage/gcs/types/__init__.py | 2 + .../gcs/types/gcs_create_response.py | 4 +- .../gcs/types/gcs_update_response.py | 4 +- .../export_storage/local/__init__.py | 2 + .../export_storage/local/client.py | 409 ++---- .../export_storage/local/raw_client.py | 821 +++++++++++ .../export_storage/local/types/__init__.py | 2 + .../local/types/local_create_response.py | 4 +- .../local/types/local_update_response.py | 4 +- .../export_storage/raw_client.py | 93 ++ .../export_storage/redis/__init__.py | 2 + .../export_storage/redis/client.py | 445 ++---- .../export_storage/redis/raw_client.py | 911 ++++++++++++ .../export_storage/redis/types/__init__.py | 2 + .../redis/types/redis_create_response.py | 4 +- .../redis/types/redis_update_response.py | 4 +- .../export_storage/s3/__init__.py | 2 + .../export_storage/s3/client.py | 481 ++----- .../export_storage/s3/raw_client.py | 999 +++++++++++++ .../export_storage/s3/types/__init__.py | 2 + .../s3/types/s3create_response.py | 4 +- .../s3/types/s3update_response.py | 4 +- .../export_storage/s3s/__init__.py | 2 + .../export_storage/s3s/client.py | 413 ++---- .../export_storage/s3s/raw_client.py | 827 +++++++++++ .../export_storage/types/__init__.py | 2 + ...export_storage_list_types_response_item.py | 4 +- src/label_studio_sdk/files/__init__.py | 2 + src/label_studio_sdk/files/client.py | 261 +--- src/label_studio_sdk/files/raw_client.py | 523 +++++++ .../import_storage/__init__.py | 2 + .../import_storage/azure/__init__.py | 2 + .../import_storage/azure/client.py | 469 ++----- .../import_storage/azure/raw_client.py | 981 +++++++++++++ .../import_storage/azure/types/__init__.py | 2 + 
.../azure/types/azure_create_response.py | 4 +- .../azure/types/azure_update_response.py | 4 +- src/label_studio_sdk/import_storage/client.py | 130 +- .../import_storage/gcs/__init__.py | 2 + .../import_storage/gcs/client.py | 469 ++----- .../import_storage/gcs/raw_client.py | 981 +++++++++++++ .../import_storage/gcs/types/__init__.py | 2 + .../gcs/types/gcs_create_response.py | 4 +- .../gcs/types/gcs_update_response.py | 4 +- .../import_storage/local/__init__.py | 2 + .../import_storage/local/client.py | 409 ++---- .../import_storage/local/raw_client.py | 827 +++++++++++ .../import_storage/local/types/__init__.py | 2 + .../local/types/local_create_response.py | 4 +- .../local/types/local_update_response.py | 4 +- .../import_storage/raw_client.py | 93 ++ .../import_storage/redis/__init__.py | 2 + .../import_storage/redis/client.py | 445 ++---- .../import_storage/redis/raw_client.py | 917 ++++++++++++ .../import_storage/redis/types/__init__.py | 2 + .../redis/types/redis_create_response.py | 4 +- .../redis/types/redis_update_response.py | 4 +- .../import_storage/s3/__init__.py | 2 + .../import_storage/s3/client.py | 529 +++---- .../import_storage/s3/raw_client.py | 1129 +++++++++++++++ .../import_storage/s3/types/__init__.py | 2 + .../s3/types/s3create_response.py | 4 +- .../s3/types/s3update_response.py | 4 +- .../import_storage/s3s/__init__.py | 2 + .../import_storage/s3s/client.py | 501 ++----- .../import_storage/s3s/raw_client.py | 1047 ++++++++++++++ .../import_storage/types/__init__.py | 2 + ...import_storage_list_types_response_item.py | 4 +- src/label_studio_sdk/jwt_settings/__init__.py | 2 + src/label_studio_sdk/jwt_settings/client.py | 128 +- .../jwt_settings/raw_client.py | 212 +++ src/label_studio_sdk/ml/__init__.py | 2 + src/label_studio_sdk/ml/client.py | 472 ++----- src/label_studio_sdk/ml/raw_client.py | 968 +++++++++++++ src/label_studio_sdk/ml/types/__init__.py | 2 + .../ml/types/ml_create_response.py | 4 +- .../ml/types/ml_update_response.py | 4 
+- .../model_providers/__init__.py | 2 + .../model_providers/client.py | 396 ++---- .../model_providers/raw_client.py | 706 ++++++++++ src/label_studio_sdk/predictions/__init__.py | 2 + src/label_studio_sdk/predictions/client.py | 271 +--- .../predictions/raw_client.py | 573 ++++++++ src/label_studio_sdk/projects/__init__.py | 2 + src/label_studio_sdk/projects/client.py | 725 +++------- .../projects/exports/__init__.py | 2 + .../projects/exports/client.py | 552 ++------ .../projects/exports/raw_client.py | 1038 ++++++++++++++ .../projects/exports/types/__init__.py | 2 + .../exports/types/exports_convert_response.py | 6 +- .../exports_list_formats_response_item.py | 6 +- .../projects/pauses/__init__.py | 2 + .../projects/pauses/client.py | 289 +--- .../projects/pauses/raw_client.py | 543 ++++++++ src/label_studio_sdk/projects/raw_client.py | 1237 +++++++++++++++++ .../projects/types/__init__.py | 2 + .../types/projects_create_response.py | 6 +- .../types/projects_import_tasks_response.py | 4 +- .../projects/types/projects_list_response.py | 6 +- .../types/projects_update_response.py | 6 +- src/label_studio_sdk/prompts/__init__.py | 2 + src/label_studio_sdk/prompts/client.py | 535 ++----- .../prompts/indicators/__init__.py | 2 + .../prompts/indicators/client.py | 129 +- .../prompts/indicators/raw_client.py | 183 +++ src/label_studio_sdk/prompts/raw_client.py | 918 ++++++++++++ src/label_studio_sdk/prompts/runs/__init__.py | 2 + src/label_studio_sdk/prompts/runs/client.py | 198 +-- .../prompts/runs/raw_client.py | 348 +++++ .../prompts/runs/types/__init__.py | 2 + .../prompts/types/__init__.py | 2 + ...ictions_request_failed_predictions_item.py | 4 +- ...ompts_batch_failed_predictions_response.py | 4 +- ..._batch_predictions_request_results_item.py | 4 +- .../prompts_batch_predictions_response.py | 4 +- .../prompts/versions/__init__.py | 2 + .../prompts/versions/client.py | 546 ++------ .../prompts/versions/raw_client.py | 1008 ++++++++++++++ 
src/label_studio_sdk/tasks/__init__.py | 2 + src/label_studio_sdk/tasks/client.py | 412 ++---- src/label_studio_sdk/tasks/raw_client.py | 816 +++++++++++ src/label_studio_sdk/tasks/types/__init__.py | 2 + .../tasks/types/tasks_list_response.py | 6 +- src/label_studio_sdk/tokens/__init__.py | 2 + src/label_studio_sdk/tokens/client.py | 329 +---- src/label_studio_sdk/tokens/raw_client.py | 495 +++++++ src/label_studio_sdk/types/__init__.py | 6 + .../types/access_token_response.py | 6 +- src/label_studio_sdk/types/annotation.py | 6 +- .../types/annotation_completed_by.py | 1 + .../types/annotation_filter_options.py | 4 +- .../types/annotations_dm_field.py | 6 +- .../types/api_token_response.py | 6 +- .../types/azure_blob_export_storage.py | 6 +- .../types/azure_blob_export_storage_status.py | 3 +- .../types/azure_blob_import_storage.py | 6 +- .../types/azure_blob_import_storage_status.py | 3 +- src/label_studio_sdk/types/base_task.py | 8 +- .../types/base_task_updated_by.py | 3 +- src/label_studio_sdk/types/base_user.py | 6 +- src/label_studio_sdk/types/comment.py | 6 +- .../types/converted_format.py | 6 +- .../types/data_manager_task_serializer.py | 23 +- ...er_task_serializer_comment_authors_item.py | 5 + ...ata_manager_task_serializer_drafts_item.py | 6 +- ...anager_task_serializer_predictions_item.py | 9 +- ...k_serializer_predictions_item_model_run.py | 5 + src/label_studio_sdk/types/export.py | 10 +- src/label_studio_sdk/types/export_snapshot.py | 14 +- src/label_studio_sdk/types/file_upload.py | 4 +- src/label_studio_sdk/types/filter.py | 4 +- src/label_studio_sdk/types/filter_group.py | 6 +- .../types/gcs_export_storage.py | 6 +- .../types/gcs_export_storage_status.py | 3 +- .../types/gcs_import_storage.py | 6 +- .../types/gcs_import_storage_status.py | 3 +- src/label_studio_sdk/types/inference_run.py | 10 +- .../types/inference_run_cost_estimate.py | 4 +- .../types/inference_run_status.py | 3 +- .../types/jwt_settings_response.py | 6 +- 
.../types/key_indicator_value.py | 4 +- src/label_studio_sdk/types/key_indicators.py | 1 + .../types/key_indicators_item.py | 10 +- ...ey_indicators_item_additional_kpis_item.py | 4 +- .../key_indicators_item_extra_kpis_item.py | 4 +- .../types/local_files_export_storage.py | 6 +- .../local_files_export_storage_status.py | 3 +- .../types/local_files_import_storage.py | 6 +- .../local_files_import_storage_status.py | 3 +- src/label_studio_sdk/types/ml_backend.py | 8 +- .../types/model_provider_connection.py | 18 +- .../model_provider_connection_provider.py | 11 +- src/label_studio_sdk/types/pause.py | 8 +- src/label_studio_sdk/types/prediction.py | 6 +- src/label_studio_sdk/types/project.py | 12 +- src/label_studio_sdk/types/project_import.py | 8 +- .../types/project_label_config.py | 6 +- .../types/project_sampling.py | 3 +- src/label_studio_sdk/types/prompt.py | 10 +- .../types/prompt_associated_projects_item.py | 1 + .../prompt_associated_projects_item_id.py | 4 +- src/label_studio_sdk/types/prompt_version.py | 10 +- .../types/prompt_version_provider.py | 11 +- .../types/redis_export_storage.py | 6 +- .../types/redis_export_storage_status.py | 3 +- .../types/redis_import_storage.py | 6 +- .../types/redis_import_storage_status.py | 3 +- .../types/refined_prompt_response.py | 8 +- .../types/rotate_token_response.py | 6 +- .../types/s3export_storage.py | 8 +- .../types/s3export_storage_status.py | 3 +- .../types/s3import_storage.py | 8 +- .../types/s3import_storage_status.py | 3 +- .../types/s3s_export_storage.py | 6 +- .../types/s3s_import_storage.py | 8 +- .../types/s3s_import_storage_status.py | 3 +- .../types/serialization_option.py | 4 +- .../types/serialization_options.py | 9 +- src/label_studio_sdk/types/task.py | 6 +- .../types/task_filter_options.py | 4 +- src/label_studio_sdk/types/user_simple.py | 4 +- src/label_studio_sdk/types/view.py | 6 +- src/label_studio_sdk/types/webhook.py | 6 +- .../types/webhook_serializer_for_update.py | 10 +- 
src/label_studio_sdk/types/workspace.py | 6 +- src/label_studio_sdk/users/__init__.py | 2 + src/label_studio_sdk/users/client.py | 437 ++---- src/label_studio_sdk/users/raw_client.py | 833 +++++++++++ src/label_studio_sdk/users/types/__init__.py | 2 + .../users/types/users_get_token_response.py | 4 +- .../users/types/users_reset_token_response.py | 4 +- src/label_studio_sdk/versions/__init__.py | 2 + src/label_studio_sdk/versions/client.py | 74 +- src/label_studio_sdk/versions/raw_client.py | 91 ++ .../versions/types/__init__.py | 2 + .../versions/types/versions_get_response.py | 16 +- src/label_studio_sdk/views/__init__.py | 2 + src/label_studio_sdk/views/client.py | 322 +---- src/label_studio_sdk/views/raw_client.py | 574 ++++++++ src/label_studio_sdk/views/types/__init__.py | 2 + .../views/types/views_create_request_data.py | 6 +- .../views_create_request_data_filters.py | 14 +- ..._create_request_data_filters_items_item.py | 18 +- .../views/types/views_update_request_data.py | 6 +- .../views_update_request_data_filters.py | 14 +- ..._update_request_data_filters_items_item.py | 18 +- src/label_studio_sdk/webhooks/__init__.py | 2 + src/label_studio_sdk/webhooks/client.py | 413 ++---- src/label_studio_sdk/webhooks/raw_client.py | 824 +++++++++++ .../webhooks/types/__init__.py | 2 + src/label_studio_sdk/workspaces/__init__.py | 2 + src/label_studio_sdk/workspaces/client.py | 296 +--- .../workspaces/members/__init__.py | 2 + .../workspaces/members/client.py | 197 +-- .../workspaces/members/raw_client.py | 290 ++++ .../workspaces/members/types/__init__.py | 2 + .../members/types/members_create_response.py | 4 +- .../types/members_list_response_item.py | 4 +- src/label_studio_sdk/workspaces/raw_client.py | 561 ++++++++ tests/__init__.py | 2 - tests/conftest.py | 22 - tests/custom/test_client.py | 2 +- tests/export_storage/__init__.py | 2 - tests/export_storage/test_azure.py | 251 ---- tests/export_storage/test_gcs.py | 251 ---- tests/export_storage/test_local.py | 225 
--- tests/export_storage/test_redis.py | 261 ---- tests/export_storage/test_s3.py | 291 ---- tests/export_storage/test_s3s.py | 175 --- tests/import_storage/__init__.py | 2 - tests/import_storage/test_azure.py | 269 ---- tests/import_storage/test_gcs.py | 269 ---- tests/import_storage/test_local.py | 219 --- tests/import_storage/test_redis.py | 255 ---- tests/import_storage/test_s3.py | 319 ----- tests/import_storage/test_s3s.py | 329 ----- tests/projects/__init__.py | 2 - tests/projects/test_exports.py | 265 ---- tests/projects/test_pauses.py | 154 -- tests/prompts/__init__.py | 2 - tests/prompts/test_indicators.py | 47 - tests/prompts/test_runs.py | 74 - tests/prompts/test_versions.py | 280 ---- tests/test_actions.py | 65 - tests/test_annotations.py | 467 ------- tests/test_comments.py | 154 -- tests/test_export_storage.py | 19 - tests/test_files.py | 78 -- tests/test_import_storage.py | 19 - tests/test_jwt_settings.py | 44 - tests/test_ml.py | 229 --- tests/test_model_providers.py | 194 --- tests/test_predictions.py | 404 ------ tests/test_projects.py | 296 ---- tests/test_prompts.py | 186 --- tests/test_tasks.py | 353 ----- tests/test_tokens.py | 66 - tests/test_users.py | 226 --- tests/test_versions.py | 36 - tests/test_views.py | 178 --- tests/test_workspaces.py | 154 -- tests/utilities.py | 162 --- tests/utils/assets/models/__init__.py | 2 +- tests/utils/assets/models/circle.py | 2 +- .../assets/models/object_with_defaults.py | 1 - .../models/object_with_optional_field.py | 9 +- tests/utils/assets/models/shape.py | 6 +- tests/utils/assets/models/square.py | 2 +- .../assets/models/undiscriminated_shape.py | 1 + tests/utils/test_http_client.py | 10 +- tests/utils/test_query_encoding.py | 18 +- tests/utils/test_serialization.py | 46 +- tests/workspaces/__init__.py | 2 - tests/workspaces/test_members.py | 42 - 352 files changed, 30678 insertions(+), 18733 deletions(-) create mode 100644 src/label_studio_sdk/actions/raw_client.py create mode 100644 
src/label_studio_sdk/annotations/raw_client.py create mode 100644 src/label_studio_sdk/comments/raw_client.py create mode 100644 src/label_studio_sdk/core/force_multipart.py create mode 100644 src/label_studio_sdk/core/http_response.py create mode 100644 src/label_studio_sdk/export_storage/azure/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/gcs/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/local/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/redis/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/s3/raw_client.py create mode 100644 src/label_studio_sdk/export_storage/s3s/raw_client.py create mode 100644 src/label_studio_sdk/files/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/azure/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/gcs/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/local/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/redis/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/s3/raw_client.py create mode 100644 src/label_studio_sdk/import_storage/s3s/raw_client.py create mode 100644 src/label_studio_sdk/jwt_settings/raw_client.py create mode 100644 src/label_studio_sdk/ml/raw_client.py create mode 100644 src/label_studio_sdk/model_providers/raw_client.py create mode 100644 src/label_studio_sdk/predictions/raw_client.py create mode 100644 src/label_studio_sdk/projects/exports/raw_client.py create mode 100644 src/label_studio_sdk/projects/pauses/raw_client.py create mode 100644 src/label_studio_sdk/projects/raw_client.py create mode 100644 src/label_studio_sdk/prompts/indicators/raw_client.py create mode 100644 src/label_studio_sdk/prompts/raw_client.py create mode 100644 src/label_studio_sdk/prompts/runs/raw_client.py create mode 
100644 src/label_studio_sdk/prompts/versions/raw_client.py create mode 100644 src/label_studio_sdk/tasks/raw_client.py create mode 100644 src/label_studio_sdk/tokens/raw_client.py create mode 100644 src/label_studio_sdk/types/data_manager_task_serializer_comment_authors_item.py create mode 100644 src/label_studio_sdk/types/data_manager_task_serializer_predictions_item_model_run.py create mode 100644 src/label_studio_sdk/users/raw_client.py create mode 100644 src/label_studio_sdk/versions/raw_client.py create mode 100644 src/label_studio_sdk/views/raw_client.py create mode 100644 src/label_studio_sdk/webhooks/raw_client.py create mode 100644 src/label_studio_sdk/workspaces/members/raw_client.py create mode 100644 src/label_studio_sdk/workspaces/raw_client.py delete mode 100644 tests/__init__.py delete mode 100644 tests/conftest.py delete mode 100644 tests/export_storage/__init__.py delete mode 100644 tests/export_storage/test_azure.py delete mode 100644 tests/export_storage/test_gcs.py delete mode 100644 tests/export_storage/test_local.py delete mode 100644 tests/export_storage/test_redis.py delete mode 100644 tests/export_storage/test_s3.py delete mode 100644 tests/export_storage/test_s3s.py delete mode 100644 tests/import_storage/__init__.py delete mode 100644 tests/import_storage/test_azure.py delete mode 100644 tests/import_storage/test_gcs.py delete mode 100644 tests/import_storage/test_local.py delete mode 100644 tests/import_storage/test_redis.py delete mode 100644 tests/import_storage/test_s3.py delete mode 100644 tests/import_storage/test_s3s.py delete mode 100644 tests/projects/__init__.py delete mode 100644 tests/projects/test_exports.py delete mode 100644 tests/projects/test_pauses.py delete mode 100644 tests/prompts/__init__.py delete mode 100644 tests/prompts/test_indicators.py delete mode 100644 tests/prompts/test_runs.py delete mode 100644 tests/prompts/test_versions.py delete mode 100644 tests/test_actions.py delete mode 100644 
tests/test_annotations.py delete mode 100644 tests/test_comments.py delete mode 100644 tests/test_export_storage.py delete mode 100644 tests/test_files.py delete mode 100644 tests/test_import_storage.py delete mode 100644 tests/test_jwt_settings.py delete mode 100644 tests/test_ml.py delete mode 100644 tests/test_model_providers.py delete mode 100644 tests/test_predictions.py delete mode 100644 tests/test_projects.py delete mode 100644 tests/test_prompts.py delete mode 100644 tests/test_tasks.py delete mode 100644 tests/test_tokens.py delete mode 100644 tests/test_users.py delete mode 100644 tests/test_versions.py delete mode 100644 tests/test_views.py delete mode 100644 tests/test_workspaces.py delete mode 100644 tests/utilities.py delete mode 100644 tests/workspaces/__init__.py delete mode 100644 tests/workspaces/test_members.py diff --git a/.gitignore b/.gitignore index 0da665fee..d2e4ca808 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ -dist/ .mypy_cache/ +.ruff_cache/ __pycache__/ +dist/ poetry.toml -.ruff_cache/ diff --git a/.mock/definition/__package__.yml b/.mock/definition/__package__.yml index 2dc41342e..9b3a8fe07 100644 --- a/.mock/definition/__package__.yml +++ b/.mock/definition/__package__.yml @@ -767,15 +767,15 @@ types: maxLength: 256 control_weights: type: optional> - docs: >- + docs: > Dict of weights for each control tag in metric calculation. Each - control tag (e.g. label or choice) will have it's own key in control - weight dict with weight for each label and overall weight.For example, - if bounding box annotation with control tag named my_bbox should be - included with 0.33 weight in agreement calculation, and the first - label Car should be twice more important than Airplaine, then you have - to need the specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': - {'Car': 1.0, 'Airplaine': 0.5}, 'overall': 0.33} + control tag (e.g. 
label or choice) will have its own key in control + weight dict with weight for each label and overall weight. For + example, if a bounding box annotation with a control tag named my_bbox + should be included with 0.33 weight in agreement calculation, and the + first label Car should be twice as important as Airplane, then you + need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': + {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} parsed_label_config: type: optional> docs: JSON-formatted labeling configuration @@ -1932,6 +1932,14 @@ types: docs: User who created the last annotation history item source: openapi: openapi/openapi.yaml + DataManagerTaskSerializerPredictionsItemModelRun: + discriminated: false + union: + - map + - integer + source: + openapi: openapi/openapi.yaml + inline: true DataManagerTaskSerializerPredictionsItem: properties: result: @@ -1943,7 +1951,7 @@ types: model: type: optional> model_run: - type: optional> + type: optional task: type: optional project: @@ -1974,6 +1982,15 @@ types: source: openapi: openapi/openapi.yaml inline: true + DataManagerTaskSerializerCommentAuthorsItem: + discriminated: false + docs: Users who wrote comments + union: + - integer + - map + source: + openapi: openapi/openapi.yaml + inline: true DataManagerTaskSerializer: properties: id: @@ -2065,7 +2082,7 @@ types: type: optional docs: Project ID for this task comment_authors: - type: optional> + type: optional> docs: Users who wrote comments source: openapi: openapi/openapi.yaml diff --git a/.mock/definition/files.yml b/.mock/definition/files.yml index b76af3f86..f5153543c 100644 --- a/.mock/definition/files.yml +++ b/.mock/definition/files.yml @@ -64,9 +64,9 @@ service: ```bash - curl -H 'Authorization: Token abc123' \ -X POST + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/import/file-upload/245' -F - ‘file=@path/to/my_file.csv’ + 'file=@path/to/my_file.csv' ``` source: diff --git a/.mock/definition/projects.yml 
b/.mock/definition/projects.yml index 4c83362f7..f3ac39644 100644 --- a/.mock/definition/projects.yml +++ b/.mock/definition/projects.yml @@ -62,7 +62,7 @@ types: default: '#FFFFFF' control_weights: type: optional> - docs: >- + docs: > Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For @@ -70,7 +70,7 @@ types: should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': - {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} source: openapi: openapi/openapi.yaml ProjectsUpdateResponse: @@ -123,7 +123,7 @@ types: default: '#FFFFFF' control_weights: type: optional> - docs: >- + docs: > Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For @@ -131,7 +131,7 @@ types: should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': - {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} source: openapi: openapi/openapi.yaml ProjectsImportTasksResponse: @@ -359,7 +359,7 @@ service: default: '#FFFFFF' control_weights: type: optional> - docs: >- + docs: > Dict of weights for each control tag in metric calculation. Each control tag (e.g. 
label or choice) will have its own key in control weight dict with weight for each label and overall @@ -368,7 +368,7 @@ service: agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': - 0.5}, 'overall': 0.33} + 0.5}, 'overall': 0.33}} workspace: type: optional docs: Workspace ID @@ -628,7 +628,7 @@ service: default: '#FFFFFF' control_weights: type: optional> - docs: >- + docs: > Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall @@ -637,7 +637,7 @@ service: agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': - 0.5}, 'overall': 0.33} + 0.5}, 'overall': 0.33}} workspace: type: optional docs: Workspace ID @@ -707,7 +707,7 @@ service: There are three possible ways to import tasks with this endpoint: - #### 1\. **POST with data** + #### 1. **POST with data** Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. @@ -722,15 +722,13 @@ service: ```bash curl -H 'Content-Type: application/json' -H 'Authorization: Token - abc123' \ - - -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": - "Some text 1"}, {"text": "Some text 2"}]' + abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data + '[{"text": "Some text 1"}, {"text": "Some text 2"}]' ``` - #### 2\. **POST with files** + #### 2. **POST with files** Send tasks as files. You can attach multiple files with different names. 
@@ -753,15 +751,14 @@ service: ```bash - curl -H 'Authorization: Token abc123' \ - - -X POST 'https://localhost:8080/api/projects/1/import' -F - ‘file=@path/to/my_file.csv’ + curl -H 'Authorization: Token abc123' -X POST + 'https://localhost:8080/api/projects/1/import' -F + 'file=@path/to/my_file.csv' ``` - #### 3\. **POST with URL** + #### 3. **POST with URL** You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. @@ -770,11 +767,8 @@ service: ```bash curl -H 'Content-Type: application/json' -H 'Authorization: Token - abc123' \ - - -X POST 'https://localhost:8080/api/projects/1/import' \ - - --data '[{"url": "http://example.com/test1.csv"}, {"url": + abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data + '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' ``` diff --git a/.mock/definition/projects/exports.yml b/.mock/definition/projects/exports.yml index e3ab85cf6..5a9786901 100644 --- a/.mock/definition/projects/exports.yml +++ b/.mock/definition/projects/exports.yml @@ -43,7 +43,7 @@ service: ```bash curl -X GET - https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H + "https://localhost:8080/api/projects/{id}/export?ids[]=123&ids[]=345" -H 'Authorization: Token abc123' --output 'annotations.json' ``` diff --git a/.mock/definition/prompts.yml b/.mock/definition/prompts.yml index 4dfce7746..5727b2fce 100644 --- a/.mock/definition/prompts.yml +++ b/.mock/definition/prompts.yml @@ -187,6 +187,10 @@ service: display-name: Create batch predictions request: name: PromptsBatchPredictionsRequest + query-parameters: + num_predictions: + type: optional + docs: Number of predictions being sent body: properties: modelrun_id: @@ -216,6 +220,10 @@ service: display-name: Create batch of failed predictions request: name: PromptsBatchFailedPredictionsRequest + query-parameters: + num_failed_predictions: + type: optional + docs: Number of failed predictions 
being sent body: properties: modelrun_id: diff --git a/.mock/fern.config.json b/.mock/fern.config.json index b26fef150..6dc5e9e34 100644 --- a/.mock/fern.config.json +++ b/.mock/fern.config.json @@ -1,4 +1,4 @@ { "organization" : "humansignal-org", - "version" : "0.62.4" + "version" : "0.65.7" } \ No newline at end of file diff --git a/.mock/openapi/openapi.yaml b/.mock/openapi/openapi.yaml index 1a6d8cc4f..8f5d43101 100644 --- a/.mock/openapi/openapi.yaml +++ b/.mock/openapi/openapi.yaml @@ -1473,9 +1473,9 @@ paths: ```bash - curl -X POST -H 'Content-type: application/json' https://localhost:8080/api/ml -H 'Authorization: Token abc123'\ + curl -X POST -H 'Content-type: application/json' https://localhost:8080/api/ml -H 'Authorization: Token abc123' --data '{"url": "http://localhost:9090", "project": {project_id}}' - --data '{"url": "http://localhost:9090", "project": {project_id}}' + ``` requestBody: $ref: "#/components/requestBodies/api_ml_createData" responses: @@ -2396,15 +2396,16 @@ paths: default: "#FFFFFF" control_weights: title: control_weights - description: "Dict of weights for each control tag in metric calculation. Each - control tag (e.g. label or choice) will have its own key - in control weight dict with weight for each label and - overall weight. For example, if a bounding box annotation - with a control tag named my_bbox should be included with - 0.33 weight in agreement calculation, and the first label - Car should be twice as important as Airplane, then you - need to specify: {'my_bbox': {'type': 'RectangleLabels', - 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}" + description: > + Dict of weights for each control tag in metric calculation. Each + control tag (e.g. label or choice) will have its own key in + control weight dict with weight for each label and overall + weight. 
For example, if a bounding box annotation with a + control tag named my_bbox should be included with 0.33 weight + in agreement calculation, and the first label Car should be + twice as important as Airplane, then you need to specify: + {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, + 'Airplane': 0.5}, 'overall': 0.33}} type: object example: my_bbox: @@ -2586,15 +2587,16 @@ paths: default: "#FFFFFF" control_weights: title: control_weights - description: "Dict of weights for each control tag in metric calculation. Each - control tag (e.g. label or choice) will have its own key - in control weight dict with weight for each label and - overall weight. For example, if a bounding box annotation - with a control tag named my_bbox should be included with - 0.33 weight in agreement calculation, and the first label - Car should be twice as important as Airplane, then you - need to specify: {'my_bbox': {'type': 'RectangleLabels', - 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}" + description: > + Dict of weights for each control tag in metric calculation. Each + control tag (e.g. label or choice) will have its own key in + control weight dict with weight for each label and overall + weight. For example, if a bounding box annotation with a + control tag named my_bbox should be included with 0.33 weight + in agreement calculation, and the first label Car should be + twice as important as Airplane, then you need to specify: + {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, + 'Airplane': 0.5}, 'overall': 0.33}} type: object example: my_bbox: @@ -2721,7 +2723,7 @@ paths: There are three possible ways to import tasks with this endpoint: - ### 1\. **POST with data** + ### 1. **POST with data** Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. 
@@ -2732,14 +2734,12 @@ paths: ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - - -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' ``` - ### 2\. **POST with files** + ### 2. **POST with files** Send tasks as files. You can attach multiple files with different names. @@ -2760,25 +2760,19 @@ paths: ```bash - curl -H 'Authorization: Token abc123' \ - - -X POST 'https://localhost:8080/api/projects/1/import' -F ‘file=@path/to/my_file.csv’ + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' ``` - ### 3\. **POST with URL** + ### 3. **POST with URL** You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - - -X POST 'https://localhost:8080/api/projects/1/import' \ - - --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' ``` @@ -6700,7 +6694,8 @@ components: default: "#FFFFFF" control_weights: title: control_weights - description: "Dict of weights for each control tag in metric calculation. Each + description: > + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a @@ -6708,7 +6703,7 @@ components: in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, - 'Airplane': 0.5}, 'overall': 0.33}" + 'Airplane': 0.5}, 'overall': 0.33}} type: object example: my_bbox: @@ -8641,15 +8636,16 @@ components: nullable: true control_weights: title: Control weights - description: "Dict of weights for each control tag in metric calculation. Each - control tag (e.g. label or choice) will have it's own key in control - weight dict with weight for each label and overall weight.For - example, if bounding box annotation with control tag named my_bbox - should be included with 0.33 weight in agreement calculation, and - the first label Car should be twice more important than Airplaine, - then you have to need the specify: {'my_bbox': {'type': - 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplaine': 0.5}, - 'overall': 0.33}" + description: > + Dict of weights for each control tag in metric calculation. Each + control tag (e.g. label or choice) will have its own key in + control weight dict with weight for each label and overall + weight. 
For example, if a bounding box annotation with a + control tag named my_bbox should be included with 0.33 weight + in agreement calculation, and the first label Car should be + twice as important as Airplane, then you need to specify: + {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, + 'Airplane': 0.5}, 'overall': 0.33}} type: object nullable: true parsed_label_config: @@ -10283,7 +10279,9 @@ components: type: object title: ML Backend instance model_run: - type: object + oneOf: + - type: object + - type: integer title: Model Run instance task: type: integer @@ -10452,7 +10450,9 @@ components: type: array items: description: Users who wrote comments - type: integer + oneOf: + - type: integer + - type: object uniqueItems: true Webhook: required: diff --git a/poetry.lock b/poetry.lock index 3a545a803..94cec6c7e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -125,13 +125,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2025.4.26" +version = "2025.7.9" description = "Python package for providing Mozilla's CA Bundle." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, - {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, + {file = "certifi-2025.7.9-py3-none-any.whl", hash = "sha256:d842783a14f8fdd646895ac26f719a061408834473cfc10203f6a575beb15d39"}, + {file = "certifi-2025.7.9.tar.gz", hash = "sha256:c1d2ec05395148ee10cf672ffc28cd37ea0ab0d99f9cc74c43e588cbd111b079"}, ] [[package]] @@ -348,13 +348,13 @@ test = ["pytest (>=6)"] [[package]] name = "faker" -version = "37.3.0" +version = "37.4.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.9" files = [ - {file = "faker-37.3.0-py3-none-any.whl", hash = "sha256:48c94daa16a432f2d2bc803c7ff602509699fca228d13e97e379cd860a7e216e"}, - {file = "faker-37.3.0.tar.gz", hash = "sha256:77b79e7a2228d57175133af0bbcdd26dc623df81db390ee52f5104d46c010f2f"}, + {file = "faker-37.4.0-py3-none-any.whl", hash = "sha256:cb81c09ebe06c32a10971d1bbdb264bb0e22b59af59548f011ac4809556ce533"}, + {file = "faker-37.4.0.tar.gz", hash = "sha256:7f69d579588c23d5ce671f3fa872654ede0e67047820255f43a4aa1925b89780"}, ] [package.dependencies] @@ -594,13 +594,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "joblib" -version = "1.5.0" +version = "1.5.1" description = "Lightweight pipelining with Python functions" optional = false python-versions = ">=3.9" files = [ - {file = "joblib-1.5.0-py3-none-any.whl", hash = "sha256:206144b320246485b712fc8cc51f017de58225fa8b414a1fe1764a7231aca491"}, - {file = "joblib-1.5.0.tar.gz", hash = "sha256:d8757f955389a3dd7a23152e43bc297c2e0c2d3060056dad0feefc88a06939b5"}, + {file = "joblib-1.5.1-py3-none-any.whl", hash = "sha256:4719a31f054c7d766948dcd83e9613686b27114f190f717cec7eaa2084f8a74a"}, + {file = "joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444"}, ] [[package]] @@ -627,13 +627,13 @@ cli = ["typer (>=0.7.0)"] [[package]] name = "jsonschema" -version = "4.23.0" +version = "4.24.0" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, + {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"}, + {file = "jsonschema-4.24.0.tar.gz", hash = 
"sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"}, ] [package.dependencies] @@ -662,143 +662,105 @@ referencing = ">=0.31.0" [[package]] name = "lxml" -version = "5.4.0" +version = "6.0.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"}, - {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c"}, 
- {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776"}, - {file = "lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7"}, - {file = "lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250"}, - {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9"}, - {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44"}, - {file = 
"lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751"}, - {file = "lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4"}, - {file = "lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539"}, - {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4"}, - {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc"}, - {file = "lxml-5.4.0-cp312-cp312-win32.whl", hash = 
"sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f"}, - {file = "lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2"}, - {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0"}, - {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982"}, - {file = 
"lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a"}, - {file = "lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82"}, - {file = "lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f"}, - {file = "lxml-5.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410"}, - {file = "lxml-5.4.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c"}, - {file = "lxml-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56"}, - {file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"}, - {file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"}, - {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188"}, - {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"}, - {file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"}, - {file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"}, - {file = "lxml-5.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f"}, - {file = 
"lxml-5.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e"}, - {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84"}, - {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6"}, - {file = "lxml-5.4.0-cp38-cp38-win32.whl", hash = "sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88"}, - {file = "lxml-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b"}, - {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e"}, - {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd"}, - {file = 
"lxml-5.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140"}, - {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5"}, - {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142"}, - {file = "lxml-5.4.0-cp39-cp39-win32.whl", hash = "sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6"}, - {file = "lxml-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6"}, - {file = 
"lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987"}, - {file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"}, + {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35bc626eec405f745199200ccb5c6b36f202675d204aa29bb52e27ba2b71dea8"}, + {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:246b40f8a4aec341cbbf52617cad8ab7c888d944bfe12a6abd2b1f6cfb6f6082"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2793a627e95d119e9f1e19720730472f5543a6d84c50ea33313ce328d870f2dd"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:46b9ed911f36bfeb6338e0b482e7fe7c27d362c52fde29f221fddbc9ee2227e7"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b4790b558bee331a933e08883c423f65bbcd07e278f91b2272489e31ab1e2b4"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2030956cf4886b10be9a0285c6802e078ec2391e1dd7ff3eb509c2c95a69b76"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23854ecf381ab1facc8f353dcd9adeddef3652268ee75297c1164c987c11dc"}, + {file = "lxml-6.0.0-cp310-cp310-manylinux_2_31_armv7l.whl", hash = 
"sha256:43fe5af2d590bf4691531b1d9a2495d7aab2090547eaacd224a3afec95706d76"}, + {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74e748012f8c19b47f7d6321ac929a9a94ee92ef12bc4298c47e8b7219b26541"}, + {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:43cfbb7db02b30ad3926e8fceaef260ba2fb7df787e38fa2df890c1ca7966c3b"}, + {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:34190a1ec4f1e84af256495436b2d196529c3f2094f0af80202947567fdbf2e7"}, + {file = "lxml-6.0.0-cp310-cp310-win32.whl", hash = "sha256:5967fe415b1920a3877a4195e9a2b779249630ee49ece22021c690320ff07452"}, + {file = "lxml-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:f3389924581d9a770c6caa4df4e74b606180869043b9073e2cec324bad6e306e"}, + {file = "lxml-6.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:522fe7abb41309e9543b0d9b8b434f2b630c5fdaf6482bee642b34c8c70079c8"}, + {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36"}, + {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e"}, + {file = "lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58"}, + {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2"}, + {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851"}, + {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f"}, + {file = "lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c"}, + {file = "lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816"}, + {file = "lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab"}, + {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108"}, + {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da"}, + {file = "lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16"}, + {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0"}, + {file = "lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a"}, + {file = "lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3"}, + {file = "lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb"}, + {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da"}, + {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = 
"sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29"}, + {file = "lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f"}, + {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef"}, + {file = "lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181"}, + {file = "lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e"}, + {file = "lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03"}, + {file = "lxml-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4eb114a0754fd00075c12648d991ec7a4357f9cb873042cc9a77bf3a7e30c9db"}, + {file = "lxml-6.0.0-cp38-cp38-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:7da298e1659e45d151b4028ad5c7974917e108afb48731f4ed785d02b6818994"}, + {file = "lxml-6.0.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bf61bc4345c1895221357af8f3e89f8c103d93156ef326532d35c707e2fb19d"}, + {file = "lxml-6.0.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63b634facdfbad421d4b61c90735688465d4ab3a8853ac22c76ccac2baf98d97"}, + {file = "lxml-6.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e380e85b93f148ad28ac15f8117e2fd8e5437aa7732d65e260134f83ce67911b"}, + {file = "lxml-6.0.0-cp38-cp38-win32.whl", hash = "sha256:185efc2fed89cdd97552585c624d3c908f0464090f4b91f7d92f8ed2f3b18f54"}, + {file = "lxml-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:f97487996a39cb18278ca33f7be98198f278d0bc3c5d0fd4d7b3d63646ca3c8a"}, + {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85b14a4689d5cff426c12eefe750738648706ea2753b20c2f973b2a000d3d261"}, + {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f64ccf593916e93b8d36ed55401bb7fe9c7d5de3180ce2e10b08f82a8f397316"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:b372d10d17a701b0945f67be58fae4664fd056b85e0ff0fbc1e6c951cdbc0512"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a674c0948789e9136d69065cc28009c1b1874c6ea340253db58be7622ce6398f"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:edf6e4c8fe14dfe316939711e3ece3f9a20760aabf686051b537a7562f4da91a"}, + {file = 
"lxml-6.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:048a930eb4572829604982e39a0c7289ab5dc8abc7fc9f5aabd6fbc08c154e93"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b5fa5eda84057a4f1bbb4bb77a8c28ff20ae7ce211588d698ae453e13c6281"}, + {file = "lxml-6.0.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:c352fc8f36f7e9727db17adbf93f82499457b3d7e5511368569b4c5bd155a922"}, + {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8db5dc617cb937ae17ff3403c3a70a7de9df4852a046f93e71edaec678f721d0"}, + {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:2181e4b1d07dde53986023482673c0f1fba5178ef800f9ab95ad791e8bdded6a"}, + {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3c98d5b24c6095e89e03d65d5c574705be3d49c0d8ca10c17a8a4b5201b72f5"}, + {file = "lxml-6.0.0-cp39-cp39-win32.whl", hash = "sha256:04d67ceee6db4bcb92987ccb16e53bef6b42ced872509f333c04fb58a3315256"}, + {file = "lxml-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0b1520ef900e9ef62e392dd3d7ae4f5fa224d1dd62897a792cf353eb20b6cae"}, + {file = "lxml-6.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:e35e8aaaf3981489f42884b59726693de32dabfc438ac10ef4eb3409961fd402"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:dbdd7679a6f4f08152818043dbb39491d1af3332128b3752c3ec5cebc0011a72"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40442e2a4456e9910875ac12951476d36c0870dcb38a68719f8c4686609897c4"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db0efd6bae1c4730b9c863fc4f5f3c0fa3e8f05cae2c44ae141cb9dfc7d091dc"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab542c91f5a47aaa58abdd8ea84b498e8e49fe4b883d67800017757a3eb78e8"}, + {file = 
"lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:013090383863b72c62a702d07678b658fa2567aa58d373d963cca245b017e065"}, + {file = "lxml-6.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c86df1c9af35d903d2b52d22ea3e66db8058d21dc0f59842ca5deb0595921141"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4337e4aec93b7c011f7ee2e357b0d30562edd1955620fdd4aeab6aacd90d43c5"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ae74f7c762270196d2dda56f8dd7309411f08a4084ff2dfcc0b095a218df2e06"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:059c4cbf3973a621b62ea3132934ae737da2c132a788e6cfb9b08d63a0ef73f9"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f090a9bc0ce8da51a5632092f98a7e7f84bca26f33d161a98b57f7fb0004ca"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9da022c14baeec36edfcc8daf0e281e2f55b950249a455776f0d1adeeada4734"}, + {file = "lxml-6.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a55da151d0b0c6ab176b4e761670ac0e2667817a1e0dadd04a01d0561a219349"}, + {file = "lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72"}, ] [package.extras] @@ -806,7 +768,6 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml_html_clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.11,<3.1.0)"] [[package]] name = "markupsafe" @@ -880,48 +841,55 @@ files = [ [[package]] name = "mypy" -version = "1.0.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, - {file 
= "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, - {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, - {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, - {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, - {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, - {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, - {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, - {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, - {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, - {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, - {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, - {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, - {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, - {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, - {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, - {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, - {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, - {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, - {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -1016,29 +984,22 @@ files = [ [[package]] name = "opencv-python" -version = "4.11.0.86" +version = "4.12.0.88" description = "Wrapper package for OpenCV python bindings." 
optional = false python-versions = ">=3.6" files = [ - {file = "opencv-python-4.11.0.86.tar.gz", hash = "sha256:03d60ccae62304860d232272e4a4fda93c39d595780cb40b161b310244b736a4"}, - {file = "opencv_python-4.11.0.86-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:432f67c223f1dc2824f5e73cdfcd9db0efc8710647d4e813012195dc9122a52a"}, - {file = "opencv_python-4.11.0.86-cp37-abi3-macosx_13_0_x86_64.whl", hash = "sha256:9d05ef13d23fe97f575153558653e2d6e87103995d54e6a35db3f282fe1f9c66"}, - {file = "opencv_python-4.11.0.86-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b92ae2c8852208817e6776ba1ea0d6b1e0a1b5431e971a2a0ddd2a8cc398202"}, - {file = "opencv_python-4.11.0.86-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b02611523803495003bd87362db3e1d2a0454a6a63025dc6658a9830570aa0d"}, - {file = "opencv_python-4.11.0.86-cp37-abi3-win32.whl", hash = "sha256:810549cb2a4aedaa84ad9a1c92fbfdfc14090e2749cedf2c1589ad8359aa169b"}, - {file = "opencv_python-4.11.0.86-cp37-abi3-win_amd64.whl", hash = "sha256:085ad9b77c18853ea66283e98affefe2de8cc4c1f43eda4c100cf9b2721142ec"}, + {file = "opencv-python-4.12.0.88.tar.gz", hash = "sha256:8b738389cede219405f6f3880b851efa3415ccd674752219377353f017d2994d"}, + {file = "opencv_python-4.12.0.88-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:f9a1f08883257b95a5764bf517a32d75aec325319c8ed0f89739a57fae9e92a5"}, + {file = "opencv_python-4.12.0.88-cp37-abi3-macosx_13_0_x86_64.whl", hash = "sha256:812eb116ad2b4de43ee116fcd8991c3a687f099ada0b04e68f64899c09448e81"}, + {file = "opencv_python-4.12.0.88-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:51fd981c7df6af3e8f70b1556696b05224c4e6b6777bdd2a46b3d4fb09de1a92"}, + {file = "opencv_python-4.12.0.88-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:092c16da4c5a163a818f120c22c5e4a2f96e0db4f24e659c701f1fe629a690f9"}, + {file = "opencv_python-4.12.0.88-cp37-abi3-win32.whl", hash = 
"sha256:ff554d3f725b39878ac6a2e1fa232ec509c36130927afc18a1719ebf4fbf4357"}, + {file = "opencv_python-4.12.0.88-cp37-abi3-win_amd64.whl", hash = "sha256:d98edb20aa932fd8ebd276a72627dad9dc097695b3d435a4257557bbb49a79d2"}, ] [package.dependencies] -numpy = [ - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, - {version = ">=1.23.5", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, - {version = ">=1.21.0", markers = "python_version == \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\""}, - {version = ">=1.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"aarch64\" and python_version >= \"3.8\" and python_version < \"3.10\" or python_version > \"3.9\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_system != \"Darwin\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, - {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, - {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, -] +numpy = {version = ">=2,<2.3.0", markers = "python_version >= \"3.9\""} [[package]] name = "packaging" @@ -1053,53 +1014,53 @@ files = [ [[package]] name = "pandas" -version = "2.2.3" +version = "2.3.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, - {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, - {file = "pandas-2.2.3.tar.gz", hash = 
"sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, + {file = "pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22c2e866f7209ebc3a8f08d75766566aae02bcc91d196935a1d9e59c7b990ac9"}, + {file = "pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3583d348546201aff730c8c47e49bc159833f971c2899d6097bce68b9112a4f1"}, + {file = "pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f951fbb702dacd390561e0ea45cdd8ecfa7fb56935eb3dd78e306c19104b9b0"}, + {file = "pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd05b72ec02ebfb993569b4931b2e16fbb4d6ad6ce80224a3ee838387d83a191"}, + {file = "pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1"}, + {file = "pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97"}, + {file = "pandas-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83"}, + {file = "pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2b0540963d83431f5ce8870ea02a7430adca100cec8a050f0811f8e31035541b"}, + {file = "pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fe7317f578c6a153912bd2292f02e40c1d8f253e93c599e82620c7f69755c74f"}, + {file = "pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6723a27ad7b244c0c79d8e7007092d7c8f0f11305770e2f4cd778b3ad5f9f85"}, + {file = "pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3462c3735fe19f2638f2c3a40bd94ec2dc5ba13abbb032dd2fa1f540a075509d"}, + {file = "pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:98bcc8b5bf7afed22cc753a28bc4d9e26e078e777066bc53fac7904ddef9a678"}, + {file = "pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:4d544806b485ddf29e52d75b1f559142514e60ef58a832f74fb38e48d757b299"}, + {file = "pandas-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b3cd4273d3cb3707b6fffd217204c52ed92859533e31dc03b7c5008aa933aaab"}, + {file = "pandas-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:689968e841136f9e542020698ee1c4fbe9caa2ed2213ae2388dc7b81721510d3"}, + {file = "pandas-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:025e92411c16cbe5bb2a4abc99732a6b132f439b8aab23a59fa593eb00704232"}, + {file = "pandas-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b7ff55f31c4fcb3e316e8f7fa194566b286d6ac430afec0d461163312c5841e"}, + {file = "pandas-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dcb79bf373a47d2a40cf7232928eb7540155abbc460925c2c96d2d30b006eb4"}, + {file = "pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8"}, + {file = "pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679"}, + {file = "pandas-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8"}, + {file = "pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22"}, + {file = "pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a"}, + {file = "pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928"}, + {file = "pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9"}, + {file = "pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12"}, + {file = "pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb"}, + {file = "pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956"}, + {file = "pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a"}, + {file = "pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9"}, + {file = "pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275"}, + {file = "pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab"}, + {file = "pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96"}, + {file = "pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444"}, + {file = "pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4645f770f98d656f11c69e81aeb21c6fca076a44bed3dcbb9396a4311bc7f6d8"}, + {file = "pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:342e59589cc454aaff7484d75b816a433350b3d7964d7847327edda4d532a2e3"}, + {file = "pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d12f618d80379fde6af007f65f0c25bd3e40251dbd1636480dfffce2cf1e6da"}, + {file = "pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd71c47a911da120d72ef173aeac0bf5241423f9bfea57320110a978457e069e"}, + {file = "pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:09e3b1587f0f3b0913e21e8b32c3119174551deb4a4eba4a89bc7377947977e7"}, + {file = "pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2323294c73ed50f612f67e2bf3ae45aea04dce5690778e08a09391897f35ff88"}, + {file = "pandas-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4b0de34dc8499c2db34000ef8baad684cfa4cbd836ecee05f323ebfba348c7d"}, + {file = "pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2"}, ] [package.dependencies] @@ -1150,100 +1111,125 @@ files = [ [[package]] name = "pillow" -version = "11.2.1" +version = "11.3.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" files = [ - {file = "pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047"}, - {file = "pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d"}, - {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97"}, - {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579"}, - {file = "pillow-11.2.1-cp310-cp310-win32.whl", hash = 
"sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d"}, - {file = "pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad"}, - {file = "pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2"}, - {file = "pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70"}, - {file = "pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788"}, - {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e"}, - {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e"}, - {file = "pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6"}, - {file = "pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193"}, - {file = "pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7"}, - {file = 
"pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f"}, - {file = "pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4"}, - {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443"}, - {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c"}, - {file = "pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3"}, - {file = "pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941"}, - {file = "pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb"}, - {file = "pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28"}, - {file = "pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155"}, - {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14"}, - {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b"}, - {file = "pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2"}, - {file = "pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691"}, - {file = "pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c"}, - {file = "pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22"}, - {file = "pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = 
"sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91"}, - {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751"}, - {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9"}, - {file = "pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd"}, - {file = "pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e"}, - {file = "pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681"}, - {file = "pillow-11.2.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8"}, - {file = "pillow-11.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb"}, - {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a"}, - {file = 
"pillow-11.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36"}, - {file = "pillow-11.2.1-cp39-cp39-win32.whl", hash = "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67"}, - {file = "pillow-11.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1"}, - {file = "pillow-11.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c"}, - {file = 
"pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044"}, - {file = "pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6"}, + {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, + {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}, + {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}, + {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"}, + {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"}, + {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"}, + {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}, + {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}, + {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}, + {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}, + {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"}, + {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = 
"sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"}, + {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"}, + {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}, + {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"}, + {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"}, + {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"}, + {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"}, + {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"}, + {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"}, + {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"}, + {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"}, + {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"}, + {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"}, + {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"}, + {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"}, + {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"}, + {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"}, + {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"}, + {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"}, + {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"}, + {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"}, + {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"}, + {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"}, + {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"}, + {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}, + {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}, + {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"}, + {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"}, + {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"}, + {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"}, + {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"}, + {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"}, + {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"}, + {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"}, + {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"}, + {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"}, + {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"}, + {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"}, + {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}, + {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}, + {file = 
"pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}, + {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}, + {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"}, + {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"}, + {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"}, + {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, + {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] test-arrow = ["pyarrow"] -tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers 
(>=2024.10.12)"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"] typing = ["typing-extensions"] xmp = ["defusedxml"] @@ -1280,13 +1266,13 @@ testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.11.4" +version = "2.11.7" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" files = [ - {file = "pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"}, - {file = "pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d"}, + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, ] [package.dependencies] @@ -1676,18 +1662,18 @@ files = [ [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -1728,128 +1714,155 @@ httpx = ">=0.25.0" [[package]] name = "rpds-py" -version = "0.25.1" +version = "0.26.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ - {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"}, - {file = "rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e"}, - {file = 
"rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da"}, - {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380"}, - {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9"}, - {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54"}, - {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2"}, - {file = "rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24"}, - {file = "rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a"}, - {file = "rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d"}, - {file = "rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd"}, - {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65"}, - {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f"}, - {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d"}, - {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042"}, - {file = "rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc"}, - {file = "rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4"}, - {file = "rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4"}, - {file = "rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c"}, - {file = "rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea"}, - {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65"}, - {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c"}, - {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd"}, - {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb"}, - {file = "rpds_py-0.25.1-cp312-cp312-win32.whl", hash = "sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe"}, - {file = "rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192"}, - {file = "rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728"}, - {file = "rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559"}, - {file = "rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325"}, - {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295"}, - {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b"}, - {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98"}, - {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd"}, - {file = "rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31"}, - {file = "rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500"}, - {file = "rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = "sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5"}, - {file = "rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129"}, - {file = "rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194"}, - {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6"}, - {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78"}, - {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72"}, - {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66"}, - {file = "rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = "sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523"}, - {file = "rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763"}, - {file = "rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd"}, - {file = "rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634"}, - {file = 
"rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9"}, - {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80"}, - {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a"}, - {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451"}, - {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f"}, - {file = "rpds_py-0.25.1-cp39-cp39-win32.whl", hash = "sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449"}, - {file = "rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11"}, - {file = "rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf"}, - {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992"}, - {file = "rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793"}, - {file = "rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3"}, + {file = "rpds_py-0.26.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4c70c70f9169692b36307a95f3d8c0a9fcd79f7b4a383aad5eaa0e9718b79b37"}, + {file = "rpds_py-0.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:777c62479d12395bfb932944e61e915741e364c843afc3196b694db3d669fcd0"}, + {file = 
"rpds_py-0.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec671691e72dff75817386aa02d81e708b5a7ec0dec6669ec05213ff6b77e1bd"}, + {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a1cb5d6ce81379401bbb7f6dbe3d56de537fb8235979843f0d53bc2e9815a79"}, + {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f789e32fa1fb6a7bf890e0124e7b42d1e60d28ebff57fe806719abb75f0e9a3"}, + {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c55b0a669976cf258afd718de3d9ad1b7d1fe0a91cd1ab36f38b03d4d4aeaaf"}, + {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70d9ec912802ecfd6cd390dadb34a9578b04f9bcb8e863d0a7598ba5e9e7ccc"}, + {file = "rpds_py-0.26.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3021933c2cb7def39d927b9862292e0f4c75a13d7de70eb0ab06efed4c508c19"}, + {file = "rpds_py-0.26.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a7898b6ca3b7d6659e55cdac825a2e58c638cbf335cde41f4619e290dd0ad11"}, + {file = "rpds_py-0.26.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:12bff2ad9447188377f1b2794772f91fe68bb4bbfa5a39d7941fbebdbf8c500f"}, + {file = "rpds_py-0.26.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:191aa858f7d4902e975d4cf2f2d9243816c91e9605070aeb09c0a800d187e323"}, + {file = "rpds_py-0.26.0-cp310-cp310-win32.whl", hash = "sha256:b37a04d9f52cb76b6b78f35109b513f6519efb481d8ca4c321f6a3b9580b3f45"}, + {file = "rpds_py-0.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:38721d4c9edd3eb6670437d8d5e2070063f305bfa2d5aa4278c51cedcd508a84"}, + {file = "rpds_py-0.26.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9e8cb77286025bdb21be2941d64ac6ca016130bfdcd228739e8ab137eb4406ed"}, + {file = "rpds_py-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:5e09330b21d98adc8ccb2dbb9fc6cb434e8908d4c119aeaa772cb1caab5440a0"}, + {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9c1b92b774b2e68d11193dc39620d62fd8ab33f0a3c77ecdabe19c179cdbc1"}, + {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:824e6d3503ab990d7090768e4dfd9e840837bae057f212ff9f4f05ec6d1975e7"}, + {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ad7fd2258228bf288f2331f0a6148ad0186b2e3643055ed0db30990e59817a6"}, + {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dc23bbb3e06ec1ea72d515fb572c1fea59695aefbffb106501138762e1e915e"}, + {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80bf832ac7b1920ee29a426cdca335f96a2b5caa839811803e999b41ba9030d"}, + {file = "rpds_py-0.26.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0919f38f5542c0a87e7b4afcafab6fd2c15386632d249e9a087498571250abe3"}, + {file = "rpds_py-0.26.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d422b945683e409000c888e384546dbab9009bb92f7c0b456e217988cf316107"}, + {file = "rpds_py-0.26.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a7711fa562ba2da1aa757e11024ad6d93bad6ad7ede5afb9af144623e5f76a"}, + {file = "rpds_py-0.26.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238e8c8610cb7c29460e37184f6799547f7e09e6a9bdbdab4e8edb90986a2318"}, + {file = "rpds_py-0.26.0-cp311-cp311-win32.whl", hash = "sha256:893b022bfbdf26d7bedb083efeea624e8550ca6eb98bf7fea30211ce95b9201a"}, + {file = "rpds_py-0.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:87a5531de9f71aceb8af041d72fc4cab4943648d91875ed56d2e629bef6d4c03"}, + {file = "rpds_py-0.26.0-cp311-cp311-win_arm64.whl", hash = "sha256:de2713f48c1ad57f89ac25b3cb7daed2156d8e822cf0eca9b96a6f990718cc41"}, + {file = 
"rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d"}, + {file = "rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136"}, + {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582"}, + {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e"}, + {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15"}, + {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8"}, + {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a"}, + {file = "rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323"}, + {file = "rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158"}, + {file = "rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3"}, + {file = "rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2"}, + {file = "rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44"}, + {file = "rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c"}, + {file = "rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8"}, + {file = "rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d"}, + {file = "rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1"}, + {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e"}, + {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1"}, + {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9"}, + {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7"}, + {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04"}, + {file = "rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1"}, + {file = "rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9"}, + {file = "rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9"}, + {file = "rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba"}, + {file = 
"rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b"}, + {file = "rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5"}, + {file = "rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256"}, + {file = "rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618"}, + {file = "rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35"}, + {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f"}, + {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83"}, + {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1"}, + {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8"}, + {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f"}, + {file = "rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed"}, + {file = "rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632"}, + {file = "rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c"}, + {file = "rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0"}, + {file = "rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9"}, + {file = "rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9"}, + {file = "rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a"}, + {file = "rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf"}, + {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12"}, + {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20"}, + {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331"}, + {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f"}, + {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246"}, + {file = "rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387"}, + {file = "rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af"}, + {file = 
"rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33"}, + {file = "rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953"}, + {file = "rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9"}, + {file = "rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37"}, + {file = "rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867"}, + {file = "rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da"}, + {file = "rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7"}, + {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad"}, + {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d"}, + {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca"}, + {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19"}, + {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8"}, + {file = "rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b"}, + {file = "rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a"}, + {file = "rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170"}, + {file = "rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e"}, + {file = "rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f"}, + {file = "rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7"}, + {file = "rpds_py-0.26.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:7a48af25d9b3c15684059d0d1fc0bc30e8eee5ca521030e2bffddcab5be40226"}, + {file = "rpds_py-0.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c71c2f6bf36e61ee5c47b2b9b5d47e4d1baad6426bfed9eea3e858fc6ee8806"}, + {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d815d48b1804ed7867b539236b6dd62997850ca1c91cad187f2ddb1b7bbef19"}, + {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84cfbd4d4d2cdeb2be61a057a258d26b22877266dd905809e94172dff01a42ae"}, + {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbaa70553ca116c77717f513e08815aec458e6b69a028d4028d403b3bc84ff37"}, + {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39bfea47c375f379d8e87ab4bb9eb2c836e4f2069f0f65731d85e55d74666387"}, + {file = "rpds_py-0.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1533b7eb683fb5f38c1d68a3c78f5fdd8f1412fa6b9bf03b40f450785a0ab915"}, + {file = 
"rpds_py-0.26.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c5ab0ee51f560d179b057555b4f601b7df909ed31312d301b99f8b9fc6028284"}, + {file = "rpds_py-0.26.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e5162afc9e0d1f9cae3b577d9c29ddbab3505ab39012cb794d94a005825bde21"}, + {file = "rpds_py-0.26.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:43f10b007033f359bc3fa9cd5e6c1e76723f056ffa9a6b5c117cc35720a80292"}, + {file = "rpds_py-0.26.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e3730a48e5622e598293eee0762b09cff34dd3f271530f47b0894891281f051d"}, + {file = "rpds_py-0.26.0-cp39-cp39-win32.whl", hash = "sha256:4b1f66eb81eab2e0ff5775a3a312e5e2e16bf758f7b06be82fb0d04078c7ac51"}, + {file = "rpds_py-0.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:519067e29f67b5c90e64fb1a6b6e9d2ec0ba28705c51956637bac23a2f4ddae1"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3c0909c5234543ada2515c05dc08595b08d621ba919629e94427e8e03539c958"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c1fb0cda2abcc0ac62f64e2ea4b4e64c57dfd6b885e693095460c61bde7bb18e"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d142d2d6cf9b31c12aa4878d82ed3b2324226270b89b676ac62ccd7df52d08"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a547e21c5610b7e9093d870be50682a6a6cf180d6da0f42c47c306073bfdbbf6"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35e9a70a0f335371275cdcd08bc5b8051ac494dd58bff3bbfb421038220dc871"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dfa6115c6def37905344d56fb54c03afc49104e2ca473d5dedec0f6606913b4"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:313cfcd6af1a55a286a3c9a25f64af6d0e46cf60bc5798f1db152d97a216ff6f"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f7bf2496fa563c046d05e4d232d7b7fd61346e2402052064b773e5c378bf6f73"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:aa81873e2c8c5aa616ab8e017a481a96742fdf9313c40f14338ca7dbf50cb55f"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:68ffcf982715f5b5b7686bdd349ff75d422e8f22551000c24b30eaa1b7f7ae84"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6188de70e190847bb6db3dc3981cbadff87d27d6fe9b4f0e18726d55795cee9b"}, + {file = "rpds_py-0.26.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1c962145c7473723df9722ba4c058de12eb5ebedcb4e27e7d902920aa3831ee8"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f61a9326f80ca59214d1cceb0a09bb2ece5b2563d4e0cd37bfd5515c28510674"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:183f857a53bcf4b1b42ef0f57ca553ab56bdd170e49d8091e96c51c3d69ca696"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:941c1cfdf4799d623cf3aa1d326a6b4fdb7a5799ee2687f3516738216d2262fb"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72a8d9564a717ee291f554eeb4bfeafe2309d5ec0aa6c475170bdab0f9ee8e88"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:511d15193cbe013619dd05414c35a7dedf2088fcee93c6bbb7c77859765bd4e8"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aea1f9741b603a8d8fedb0ed5502c2bc0accbc51f43e2ad1337fe7259c2b77a5"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4019a9d473c708cf2f16415688ef0b4639e07abaa569d72f74745bbeffafa2c7"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:093d63b4b0f52d98ebae33b8c50900d3d67e0666094b1be7a12fffd7f65de74b"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2abe21d8ba64cded53a2a677e149ceb76dcf44284202d737178afe7ba540c1eb"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:4feb7511c29f8442cbbc28149a92093d32e815a28aa2c50d333826ad2a20fdf0"}, + {file = "rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e99685fc95d386da368013e7fb4269dd39c30d99f812a8372d62f244f662709c"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a90a13408a7a856b87be8a9f008fff53c5080eea4e4180f6c2e546e4a972fb5d"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ac51b65e8dc76cf4949419c54c5528adb24fc721df722fd452e5fbc236f5c40"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59b2093224a18c6508d95cfdeba8db9cbfd6f3494e94793b58972933fcee4c6d"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f01a5d6444a3258b00dc07b6ea4733e26f8072b788bef750baa37b370266137"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6e2c12160c72aeda9d1283e612f68804621f448145a210f1bf1d79151c47090"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb28c1f569f8d33b2b5dcd05d0e6ef7005d8639c54c2f0be824f05aedf715255"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1766b5724c3f779317d5321664a343c07773c8c5fd1532e4039e6cc7d1a815be"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b6d9e5a2ed9c4988c8f9b28b3bc0e3e5b1aaa10c28d210a594ff3a8c02742daf"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b5f7a446ddaf6ca0fad9a5535b56fbfc29998bf0e0b450d174bbec0d600e1d72"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:eed5ac260dd545fbc20da5f4f15e7efe36a55e0e7cf706e4ec005b491a9546a0"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:582462833ba7cee52e968b0341b85e392ae53d44c0f9af6a5927c80e539a8b67"}, + {file = "rpds_py-0.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69a607203441e07e9a8a529cff1d5b73f6a160f22db1097211e6212a68567d11"}, + {file = "rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0"}, ] [[package]] @@ -1865,29 +1878,29 @@ files = [ [[package]] name = "ruff" -version = "0.5.7" +version = "0.11.5" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, - {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, - {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, - {file = 
"ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, - {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, - {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, - {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, - {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, + {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"}, + {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"}, + {file = 
"ruff-0.11.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4bfd80a6ec559a5eeb96c33f832418bf0fb96752de0539905cf7b0cc1d31d779"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0947c0a1afa75dcb5db4b34b070ec2bccee869d40e6cc8ab25aca11a7d527794"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad871ff74b5ec9caa66cb725b85d4ef89b53f8170f47c3406e32ef040400b038"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6cf918390cfe46d240732d4d72fa6e18e528ca1f60e318a10835cf2fa3dc19f"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56145ee1478582f61c08f21076dc59153310d606ad663acc00ea3ab5b2125f82"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5f66f8f1e8c9fc594cbd66fbc5f246a8d91f916cb9667e80208663ec3728304"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80b4df4d335a80315ab9afc81ed1cff62be112bd165e162b5eed8ac55bfc8470"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3068befab73620b8a0cc2431bd46b3cd619bc17d6f7695a3e1bb166b652c382a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f5da2e710a9641828e09aa98b92c9ebbc60518fdf3921241326ca3e8f8e55b8b"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ef39f19cb8ec98cbc762344921e216f3857a06c47412030374fffd413fb8fd3a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2a7cedf47244f431fd11aa5a7e2806dda2e0c365873bda7834e8f7d785ae159"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:81be52e7519f3d1a0beadcf8e974715b2dfc808ae8ec729ecfc79bddf8dbb783"}, + {file = "ruff-0.11.5-py3-none-win32.whl", hash = "sha256:e268da7b40f56e3eca571508a7e567e794f9bfcc0f412c4b607931d3af9c4afe"}, + {file = 
"ruff-0.11.5-py3-none-win_amd64.whl", hash = "sha256:6c6dc38af3cfe2863213ea25b6dc616d679205732dc0fb673356c2d69608f800"}, + {file = "ruff-0.11.5-py3-none-win_arm64.whl", hash = "sha256:67e241b4314f4eacf14a601d586026a962f4002a475aa702c69980a38087aa4e"}, + {file = "ruff-0.11.5.tar.gz", hash = "sha256:cae2e2439cb88853e421901ec040a758960b576126dab520fa08e9de431d1bef"}, ] [[package]] @@ -1903,13 +1916,13 @@ files = [ [[package]] name = "smart-open" -version = "7.1.0" -description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +version = "7.3.0.post1" +description = "Utils for streaming large files (S3, HDFS, GCS, SFTP, Azure Blob Storage, gzip, bz2, zst...)" optional = false -python-versions = "<4.0,>=3.7" +python-versions = "<4.0,>=3.8" files = [ - {file = "smart_open-7.1.0-py3-none-any.whl", hash = "sha256:4b8489bb6058196258bafe901730c7db0dcf4f083f316e97269c66f45502055b"}, - {file = "smart_open-7.1.0.tar.gz", hash = "sha256:a4f09f84f0f6d3637c6543aca7b5487438877a21360e7368ccf1f704789752ba"}, + {file = "smart_open-7.3.0.post1-py3-none-any.whl", hash = "sha256:c73661a2c24bf045c1e04e08fffc585b59af023fe783d57896f590489db66fb4"}, + {file = "smart_open-7.3.0.post1.tar.gz", hash = "sha256:ce6a3d9bc1afbf6234ad13c010b77f8cd36d24636811e3c52c3b5160f5214d1e"}, ] [package.dependencies] @@ -1917,13 +1930,13 @@ requests = {version = "*", optional = true, markers = "extra == \"http\""} wrapt = "*" [package.extras] -all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests", "zstandard"] +all = ["smart_open[azure,gcs,http,s3,ssh,webhdfs,zst]"] azure = ["azure-common", "azure-core", "azure-storage-blob"] gcs = ["google-cloud-storage (>=2.6.0)"] http = ["requests"] s3 = ["boto3"] ssh = ["paramiko"] -test = ["awscli", "azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "numpy", "paramiko", "pyopenssl", "pytest", 
"pytest-benchmark", "pytest-rerunfailures", "requests", "responses", "zstandard"] +test = ["awscli", "moto[server]", "numpy", "pyopenssl", "pytest", "pytest-rerunfailures", "pytest_benchmark", "responses", "smart_open[all]"] webhdfs = ["requests"] zst = ["zstandard"] @@ -2013,24 +2026,24 @@ telegram = ["requests"] [[package]] name = "types-python-dateutil" -version = "2.9.0.20250516" +version = "2.9.0.20250708" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.9" files = [ - {file = "types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93"}, - {file = "types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5"}, + {file = "types_python_dateutil-2.9.0.20250708-py3-none-any.whl", hash = "sha256:4d6d0cc1cc4d24a2dc3816024e502564094497b713f7befda4d5bc7a8e3fd21f"}, + {file = "types_python_dateutil-2.9.0.20250708.tar.gz", hash = "sha256:ccdbd75dab2d6c9696c350579f34cffe2c281e4c5f27a585b2a2438dd1d5c8ab"}, ] [[package]] name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, ] [[package]] @@ -2147,13 +2160,13 @@ files = [ [[package]] name 
= "urllib3" -version = "2.4.0" +version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" files = [ - {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, - {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] @@ -2264,4 +2277,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "9a44e720f99e6df236ed3540a290a268409c157b29fdab3a2d285d4dcac10429" +content-hash = "0e5a37aa53eeb38742583be9844ef2153c31779a367d1dfa8fbc1c73f0aabd87" diff --git a/pyproject.toml b/pyproject.toml index e83291465..eb79cb913 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "label-studio-sdk" [tool.poetry] name = "label-studio-sdk" -version = "1.0.15" +version = "1.0.19" description = "" readme = "README.md" authors = [] @@ -35,7 +35,7 @@ Repository = 'https://github.com/HumanSignal/label-studio-sdk' [tool.poetry.dependencies] python = ">=3.9,<4" -Pillow = ">=10.0.1" +Pillow = ">=11.3.0" appdirs = ">=1.4.3" datamodel-code-generator = "0.26.1" httpx = ">=0.21.2" @@ -48,22 +48,23 @@ numpy = ">=1.26.4,<3.0.0" opencv-python = "^4.9.0" pandas = ">=0.24.0" pydantic = ">= 1.9.2" -pydantic-core = "^2.18.2" +pydantic-core = ">=2.18.2" pyjwt = "^2.10.1" requests = ">=2.22.0" requests-mock = "1.12.1" typing_extensions = ">= 4.0.0" ujson = ">=5.8.0" +urllib3 = ">=2.5.0" xmljson = "0.2.1" -[tool.poetry.dev-dependencies] -mypy = "1.0.1" +[tool.poetry.group.dev.dependencies] +mypy = "==1.13.0" pytest = "^7.4.0" pytest-asyncio = "^0.23.5" python-dateutil = "^2.9.0" 
types-python-dateutil = "^2.9.0.20240316" respx = "^0.22.0" -ruff = "^0.5.6" +ruff = "==0.11.5" [tool.pytest.ini_options] testpaths = [ "tests" ] @@ -75,6 +76,26 @@ plugins = ["pydantic.mypy"] [tool.ruff] line-length = 120 +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "I", # isort +] +ignore = [ + "E402", # Module level import not at top of file + "E501", # Line too long + "E711", # Comparison to `None` should be `cond is not None` + "E712", # Avoid equality comparisons to `True`; use `if ...:` checks + "E721", # Use `is` and `is not` for type comparisons, or `isinstance()` for isinstance checks + "E722", # Do not use bare `except` + "E731", # Do not assign a `lambda` expression, use a `def` + "F821", # Undefined name + "F841" # Local variable ... is assigned to but never used +] + +[tool.ruff.lint.isort] +section-order = ["future", "standard-library", "third-party", "first-party"] [build-system] requires = ["poetry-core"] diff --git a/reference.md b/reference.md index b8d9c4e0c..142332308 100644 --- a/reference.md +++ b/reference.md @@ -2122,7 +2122,7 @@ Update a specific uploaded file. To get the file upload ID, use [Get files list] You will need to include the file data in the request body. For example: ```bash -curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F ‘file=@path/to/my_file.csv’ +curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' ``` @@ -3945,7 +3945,7 @@ client.projects.create()
-**control_weights:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} +**control_weights:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}}
@@ -4290,7 +4290,7 @@ client.projects.update(
-**control_weights:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} +**control_weights:** `typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]` — Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}}
@@ -4350,18 +4350,17 @@ For example, if the label configuration has a *$text* variable, then each item i There are three possible ways to import tasks with this endpoint: -#### 1\. **POST with data** +#### 1. **POST with data** Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. Update this example to specify your authorization token and Label Studio instance host, then run the following from the command line: ```bash -curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ --X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' +curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' ``` -#### 2\. **POST with files** +#### 2. **POST with files** Send tasks as files. You can attach multiple files with different names. - **JSON**: text files in JavaScript object notation format @@ -4373,17 +4372,14 @@ Update this example to specify your authorization token, Label Studio instance h then run the following from the command line: ```bash -curl -H 'Authorization: Token abc123' \ --X POST 'https://localhost:8080/api/projects/1/import' -F ‘file=@path/to/my_file.csv’ +curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' ``` -#### 3\. **POST with URL** +#### 3. **POST with URL** You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. 
```bash -curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ --X POST 'https://localhost:8080/api/projects/1/import' \ ---data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' +curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' ```
@@ -6580,6 +6576,14 @@ client.prompts.batch_predictions()
+**num_predictions:** `typing.Optional[int]` — Number of predictions being sent + +
+
+ +
+
+ **modelrun_id:** `typing.Optional[int]` — Model Run ID to associate the prediction with
@@ -6656,6 +6660,14 @@ client.prompts.batch_failed_predictions()
+**num_failed_predictions:** `typing.Optional[int]` — Number of failed predictions being sent + +
+
+ +
+
+ **modelrun_id:** `typing.Optional[int]` — Model Run ID where the failed predictions came from
diff --git a/requirements.txt b/requirements.txt index 80e1ba6d9..3cbb89e46 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,20 +1,21 @@ -Pillow>=10.0.1 +Pillow>=11.3.0 appdirs>=1.4.3 datamodel-code-generator==0.26.1 httpx>=0.21.2 ijson>=3.2.3 -jsf==^0.11.2 +jsf==0.11.2 jsonschema>=4.23.0 lxml>=4.2.5 -nltk==^3.9.1 +nltk==3.9.1 numpy>=1.26.4,<3.0.0 -opencv-python==^4.9.0 +opencv-python==4.9.0 pandas>=0.24.0 pydantic>= 1.9.2 -pydantic-core==^2.18.2 -pyjwt==^2.10.1 +pydantic-core>=2.18.2 +pyjwt==2.10.1 requests>=2.22.0 requests-mock==1.12.1 typing_extensions>= 4.0.0 ujson>=5.8.0 +urllib3>=2.5.0 xmljson==0.2.1 diff --git a/src/label_studio_sdk/__init__.py b/src/label_studio_sdk/__init__.py index 2159010c2..664ecced9 100644 --- a/src/label_studio_sdk/__init__.py +++ b/src/label_studio_sdk/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( AccessTokenResponse, Annotation, @@ -23,8 +25,10 @@ ConvertedFormatStatus, DataManagerTaskSerializer, DataManagerTaskSerializerAnnotatorsItem, + DataManagerTaskSerializerCommentAuthorsItem, DataManagerTaskSerializerDraftsItem, DataManagerTaskSerializerPredictionsItem, + DataManagerTaskSerializerPredictionsItemModelRun, Export, ExportFormat, ExportSnapshot, @@ -227,8 +231,10 @@ "ConvertedFormatStatus", "DataManagerTaskSerializer", "DataManagerTaskSerializerAnnotatorsItem", + "DataManagerTaskSerializerCommentAuthorsItem", "DataManagerTaskSerializerDraftsItem", "DataManagerTaskSerializerPredictionsItem", + "DataManagerTaskSerializerPredictionsItemModelRun", "Export", "ExportFormat", "ExportSnapshot", diff --git a/src/label_studio_sdk/actions/__init__.py b/src/label_studio_sdk/actions/__init__.py index 99e4531ac..574cdb2ce 100644 --- a/src/label_studio_sdk/actions/__init__.py +++ b/src/label_studio_sdk/actions/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import ( ActionsCreateRequestFilters, ActionsCreateRequestFiltersConjunction, diff --git a/src/label_studio_sdk/actions/client.py b/src/label_studio_sdk/actions/client.py index 1624f4cdc..01f45e2ba 100644 --- a/src/label_studio_sdk/actions/client.py +++ b/src/label_studio_sdk/actions/client.py @@ -1,18 +1,14 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.actions_create_request_id import ActionsCreateRequestId +from .raw_client import AsyncRawActionsClient, RawActionsClient from .types.actions_create_request_filters import ActionsCreateRequestFilters -from .types.actions_create_request_selected_items import ( - ActionsCreateRequestSelectedItems, -) +from .types.actions_create_request_id import ActionsCreateRequestId from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.client_wrapper import AsyncClientWrapper +from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -20,7 +16,18 @@ class ActionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawActionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawActionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawActionsClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -44,18 +51,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> No ) client.actions.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -131,45 +128,32 @@ def create( ordering=["tasks:total_annotations"], ) """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="POST", - params={ - "id": id, - "project": project, - "view": view, - }, - json={ - "filters": convert_and_respect_annotation_metadata( - object_=filters, - annotation=ActionsCreateRequestFilters, - direction="write", - ), - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, - annotation=ActionsCreateRequestSelectedItems, - direction="write", - ), - "ordering": ordering, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + id=id, + project=project, + view=view, + filters=filters, + selected_items=selected_items, + ordering=ordering, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncActionsClient: def __init__(self, *, client_wrapper: 
AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawActionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawActionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawActionsClient + """ + return self._raw_client async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -201,18 +185,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -296,37 +270,13 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/actions/", - method="POST", - params={ - "id": id, - "project": project, - "view": view, - }, - json={ - "filters": convert_and_respect_annotation_metadata( - object_=filters, - annotation=ActionsCreateRequestFilters, - direction="write", - ), - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, - annotation=ActionsCreateRequestSelectedItems, - direction="write", - ), - "ordering": ordering, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + id=id, + project=project, + view=view, + filters=filters, + selected_items=selected_items, + ordering=ordering, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/actions/raw_client.py b/src/label_studio_sdk/actions/raw_client.py new file mode 100644 index 000000000..5c45e77ef --- /dev/null +++ b/src/label_studio_sdk/actions/raw_client.py @@ -0,0 +1,223 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from .types.actions_create_request_filters import ActionsCreateRequestFilters +from .types.actions_create_request_id import ActionsCreateRequestId +from .types.actions_create_request_ordering_item import ActionsCreateRequestOrderingItem +from .types.actions_create_request_selected_items import ActionsCreateRequestSelectedItems + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawActionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Retrieve all the registered actions with descriptions that data manager can use. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + id: ActionsCreateRequestId, + project: int, + view: typing.Optional[int] = None, + filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, + selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, + ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` + + Parameters + ---------- + id : ActionsCreateRequestId + Action name ID, see the full list of actions in the `GET api/actions` request + + project : int + Project ID + + view : typing.Optional[int] + View ID (optional, it has higher priority than filters, selectedItems and ordering from the request body payload) + + filters : typing.Optional[ActionsCreateRequestFilters] + Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}`
+
+        selected_items : typing.Optional[ActionsCreateRequestSelectedItems]
+            Task selection by IDs. If filters are applied, the selection will be applied to the filtered tasks. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + + ordering : typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] + List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="POST", + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, annotation=ActionsCreateRequestFilters, direction="write" + ), + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawActionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[None]: + """ + Retrieve all the registered actions with descriptions that data manager can use. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + id: ActionsCreateRequestId, + project: int, + view: typing.Optional[int] = None, + filters: typing.Optional[ActionsCreateRequestFilters] = OMIT, + selected_items: typing.Optional[ActionsCreateRequestSelectedItems] = OMIT, + ordering: typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + Perform a Data Manager action with the selected tasks and filters. Note: More complex actions require additional parameters in the request body. Call `GET api/actions?project=` to explore them.
Example: `GET api/actions?id=delete_tasks&project=1` + + Parameters + ---------- + id : ActionsCreateRequestId + Action name ID, see the full list of actions in the `GET api/actions` request + + project : int + Project ID + + view : typing.Optional[int] + View ID (optional, it has higher priority than filters, selectedItems and ordering from the request body payload) + + filters : typing.Optional[ActionsCreateRequestFilters] + Filters to apply on tasks. You can use [the helper class `Filters` from this page](https://labelstud.io/sdk/data_manager.html) to create Data Manager Filters.
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}`
+
+        selected_items : typing.Optional[ActionsCreateRequestSelectedItems]
+            Task selection by IDs. If filters are applied, the selection will be applied to the filtered tasks. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + + ordering : typing.Optional[typing.Sequence[ActionsCreateRequestOrderingItem]] + List of fields to order by. Fields are similar to filters but without the `filter:` prefix. To reverse the order, add a minus sign before the field name, e.g. `-tasks:created_at`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/actions/", + method="POST", + params={ + "id": id, + "project": project, + "view": view, + }, + json={ + "filters": convert_and_respect_annotation_metadata( + object_=filters, annotation=ActionsCreateRequestFilters, direction="write" + ), + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=ActionsCreateRequestSelectedItems, direction="write" + ), + "ordering": ordering, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/actions/types/__init__.py b/src/label_studio_sdk/actions/types/__init__.py index f44d52635..18d6b19ea 100644 --- a/src/label_studio_sdk/actions/types/__init__.py +++ b/src/label_studio_sdk/actions/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .actions_create_request_filters import ActionsCreateRequestFilters from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction from .actions_create_request_filters_items_item import ActionsCreateRequestFiltersItemsItem diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters.py b/src/label_studio_sdk/actions/types/actions_create_request_filters.py index 85e1e9ec9..1a20a01c2 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters.py @@ -1,15 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .actions_create_request_filters_conjunction import ( - ActionsCreateRequestFiltersConjunction, -) -import pydantic import typing -from .actions_create_request_filters_items_item import ( - ActionsCreateRequestFiltersItemsItem, -) -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .actions_create_request_filters_conjunction import ActionsCreateRequestFiltersConjunction +from .actions_create_request_filters_items_item import ActionsCreateRequestFiltersItemsItem class ActionsCreateRequestFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py index aa09d841e..c5442ec10 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_filters_items_item.py @@ -1,19 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel -from .actions_create_request_filters_items_item_filter import ( - ActionsCreateRequestFiltersItemsItemFilter, -) -import pydantic -from .actions_create_request_filters_items_item_operator import ( - ActionsCreateRequestFiltersItemsItemOperator, -) -from .actions_create_request_filters_items_item_value import ( - ActionsCreateRequestFiltersItemsItemValue, -) -from ...core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .actions_create_request_filters_items_item_filter import ActionsCreateRequestFiltersItemsItemFilter +from .actions_create_request_filters_items_item_operator import ActionsCreateRequestFiltersItemsItemOperator +from .actions_create_request_filters_items_item_value import ActionsCreateRequestFiltersItemsItemValue + class ActionsCreateRequestFiltersItemsItem(UniversalBaseModel): filter: ActionsCreateRequestFiltersItemsItemFilter = pydantic.Field() diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py index 88f37db12..2ceac5aac 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items.py @@ -1,12 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from .actions_create_request_selected_items_included import ( - ActionsCreateRequestSelectedItemsIncluded, -) -from .actions_create_request_selected_items_excluded import ( - ActionsCreateRequestSelectedItemsExcluded, -) + +from .actions_create_request_selected_items_excluded import ActionsCreateRequestSelectedItemsExcluded +from .actions_create_request_selected_items_included import ActionsCreateRequestSelectedItemsIncluded ActionsCreateRequestSelectedItems = typing.Union[ ActionsCreateRequestSelectedItemsIncluded, ActionsCreateRequestSelectedItemsExcluded diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py index e1d15e854..89b9d4466 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel +import typing + +import pydantic import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata -import pydantic -import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestSelectedItemsExcluded(UniversalBaseModel): diff --git a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py index 90f2ec326..7d943e6c5 100644 --- a/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py +++ b/src/label_studio_sdk/actions/types/actions_create_request_selected_items_included.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel +import typing + +import pydantic import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata -import pydantic -import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ActionsCreateRequestSelectedItemsIncluded(UniversalBaseModel): diff --git a/src/label_studio_sdk/annotations/__init__.py b/src/label_studio_sdk/annotations/__init__.py index fb830a2b1..51c6a9415 100644 --- a/src/label_studio_sdk/annotations/__init__.py +++ b/src/label_studio_sdk/annotations/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import AnnotationsCreateBulkRequestSelectedItems, AnnotationsCreateBulkResponseItem __all__ = ["AnnotationsCreateBulkRequestSelectedItems", "AnnotationsCreateBulkResponseItem"] diff --git a/src/label_studio_sdk/annotations/client.py b/src/label_studio_sdk/annotations/client.py index 61993721e..f6c692d23 100644 --- a/src/label_studio_sdk/annotations/client.py +++ b/src/label_studio_sdk/annotations/client.py @@ -1,21 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.annotation import Annotation -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.annotations_create_bulk_request_selected_items import ( - AnnotationsCreateBulkRequestSelectedItems, -) -from .types.annotations_create_bulk_response_item import ( - AnnotationsCreateBulkResponseItem, -) -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawAnnotationsClient, RawAnnotationsClient +from .types.annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems +from .types.annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,7 +15,18 @@ class AnnotationsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAnnotationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAnnotationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawAnnotationsClient + """ + return self._raw_client def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ @@ -56,24 +59,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -107,18 +94,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -210,38 +187,19 @@ def update( ground_truth=True, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - 
"was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Annotation]: """ @@ -274,24 +232,8 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Annotation], - parse_obj_as( - type_=typing.List[Annotation], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, request_options=request_options) + return _response.data def create( self, @@ -396,38 +338,19 @@ def create( ground_truth=True, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="POST", - json={ - "result": result, - "task": task, - "project": project, 
- "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create_bulk( self, @@ -471,44 +394,31 @@ def create_bulk( ) client.annotations.create_bulk() """ - _response = self._client_wrapper.httpx_client.request( - "api/annotations/bulk", - method="POST", - json={ - "tasks": tasks, - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, - annotation=AnnotationsCreateBulkRequestSelectedItems, - direction="write", - ), - "lead_time": lead_time, - "project": project, - "result": result, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create_bulk( + tasks=tasks, + selected_items=selected_items, + lead_time=lead_time, + project=project, + result=result, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AnnotationsCreateBulkResponseItem], - parse_obj_as( - type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - 
raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncAnnotationsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAnnotationsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAnnotationsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawAnnotationsClient + """ + return self._raw_client async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Annotation: """ @@ -549,24 +459,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -608,18 +502,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await 
self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -719,38 +603,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/annotations/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -793,24 +658,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Annotation], - parse_obj_as( - type_=typing.List[Annotation], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + _response = await self._raw_client.list(id, request_options=request_options) + return _response.data async def create( self, @@ -923,38 +772,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/annotations/", - method="POST", - json={ - "result": result, - "task": task, - "project": project, - "completed_by": completed_by, - "updated_by": updated_by, - "was_cancelled": was_cancelled, - "ground_truth": ground_truth, - "lead_time": lead_time, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + id, + result=result, + task=task, + project=project, + completed_by=completed_by, + updated_by=updated_by, + was_cancelled=was_cancelled, + ground_truth=ground_truth, + lead_time=lead_time, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Annotation, - parse_obj_as( - type_=Annotation, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create_bulk( self, @@ -1006,36 +836,12 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/annotations/bulk", - method="POST", - json={ - "tasks": tasks, - "selectedItems": convert_and_respect_annotation_metadata( - object_=selected_items, - annotation=AnnotationsCreateBulkRequestSelectedItems, - direction="write", - ), - "lead_time": lead_time, - "project": project, - "result": result, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create_bulk( + tasks=tasks, + selected_items=selected_items, + lead_time=lead_time, + project=project, + 
result=result, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AnnotationsCreateBulkResponseItem], - parse_obj_as( - type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/annotations/raw_client.py b/src/label_studio_sdk/annotations/raw_client.py new file mode 100644 index 000000000..e6e2a2650 --- /dev/null +++ b/src/label_studio_sdk/annotations/raw_client.py @@ -0,0 +1,794 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.annotation import Annotation +from .types.annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems +from .types.annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAnnotationsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Annotation]: + """ + + Tasks can have multiple annotations. 
Use this call to retrieve a specific annotation using its ID. + + You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Annotation] + Retrieved annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete an annotation. + + This action can't be undone! + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Annotation]: + """ + + Update attributes for an existing annotation. + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Annotation] + Updated annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[Annotation]]: + """ + + List all annotations for a task. + + You will need to supply the task ID. 
You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + + Parameters + ---------- + id : int + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Annotation]] + Annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Annotation]: + """ + + Add annotations to a task like an annotator does. + + You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + + + The content of the result field depends on your labeling configuration. 
For example, send the following data as part of your POST + request to send an empty annotation with the ID of the user who completed the task: + + ```json + { + "result": {}, + "was_cancelled": true, + "ground_truth": true, + "lead_time": 0, + "task": 0 + "completed_by": 123 + } + ``` + + Parameters + ---------- + id : int + Task ID + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Annotation] + Created annotation + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="POST", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create_bulk( + self, + *, + tasks: typing.Optional[typing.Sequence[int]] = OMIT, + selected_items: typing.Optional[AnnotationsCreateBulkRequestSelectedItems] = OMIT, + lead_time: typing.Optional[float] = OMIT, + project: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[AnnotationsCreateBulkResponseItem]]: + """ + Create multiple annotations for specific tasks in a bulk operation. + + Parameters + ---------- + tasks : typing.Optional[typing.Sequence[int]] + + selected_items : typing.Optional[AnnotationsCreateBulkRequestSelectedItems] + + lead_time : typing.Optional[float] + + project : typing.Optional[int] + + result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[AnnotationsCreateBulkResponseItem]] + Annotations created successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/annotations/bulk", + method="POST", + json={ + "tasks": tasks, + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" + ), + "lead_time": lead_time, + "project": project, + "result": result, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawAnnotationsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Annotation]: + """ + + Tasks can have multiple annotations. Use this call to retrieve a specific annotation using its ID. + + You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Annotation] + Retrieved annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete an annotation. + + This action can't be undone! + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Annotation]: + """ + + Update attributes for an existing annotation. + + You will need to supply the annotation's unique ID. You can find the ID in the Label Studio UI listed at the top of the annotation in its tab. It is also listed in the History panel when viewing the annotation. Or you can use [Get all task annotations](list) to find all annotation IDs. + + For information about the JSON format used in the result, see [Label Studio JSON format of annotated tasks](https://labelstud.io/guide/export#Label-Studio-JSON-format-of-annotated-tasks). + + Parameters + ---------- + id : int + A unique integer value identifying this annotation. + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. 
Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Annotation] + Updated annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/annotations/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Annotation]]: + """ + + List all annotations for a task. 
+ + You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). + + Parameters + ---------- + id : int + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[Annotation]] + Annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Annotation], + parse_obj_as( + type_=typing.List[Annotation], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + id: int, + *, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + task: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + completed_by: typing.Optional[int] = OMIT, + updated_by: typing.Optional[int] = OMIT, + was_cancelled: typing.Optional[bool] = OMIT, + ground_truth: typing.Optional[bool] = OMIT, + lead_time: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Annotation]: + """ + + Add annotations to a task like an annotator does. + + You will need to supply the task ID. You can find this in Label Studio by opening a task and checking the URL. It is also listed at the top of the labeling interface. Or you can use [Get tasks list](../tasks/list). 
+ + + The content of the result field depends on your labeling configuration. For example, send the following data as part of your POST + request to send an empty annotation with the ID of the user who completed the task: + + ```json + { + "result": {}, + "was_cancelled": true, + "ground_truth": true, + "lead_time": 0, + "task": 0, + "completed_by": 123 + } + ``` + + Parameters + ---------- + id : int + Task ID + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Labeling result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/task_format) + + task : typing.Optional[int] + Corresponding task for this annotation + + project : typing.Optional[int] + Project ID for this annotation + + completed_by : typing.Optional[int] + User ID of the person who created this annotation + + updated_by : typing.Optional[int] + Last user who updated this annotation + + was_cancelled : typing.Optional[bool] + User skipped the task + + ground_truth : typing.Optional[bool] + This annotation is a Ground Truth + + lead_time : typing.Optional[float] + How much time it took to annotate the task (in seconds) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Annotation] + Created annotation + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/annotations/", + method="POST", + json={ + "result": result, + "task": task, + "project": project, + "completed_by": completed_by, + "updated_by": updated_by, + "was_cancelled": was_cancelled, + "ground_truth": ground_truth, + "lead_time": lead_time, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Annotation, + parse_obj_as( + type_=Annotation, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create_bulk( + self, + *, + tasks: typing.Optional[typing.Sequence[int]] = OMIT, + selected_items: typing.Optional[AnnotationsCreateBulkRequestSelectedItems] = OMIT, + lead_time: typing.Optional[float] = OMIT, + project: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[AnnotationsCreateBulkResponseItem]]: + """ + Create multiple annotations for specific tasks in a bulk operation. + + Parameters + ---------- + tasks : typing.Optional[typing.Sequence[int]] + + selected_items : typing.Optional[AnnotationsCreateBulkRequestSelectedItems] + + lead_time : typing.Optional[float] + + project : typing.Optional[int] + + result : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[AnnotationsCreateBulkResponseItem]] + Annotations created successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/annotations/bulk", + method="POST", + json={ + "tasks": tasks, + "selectedItems": convert_and_respect_annotation_metadata( + object_=selected_items, annotation=AnnotationsCreateBulkRequestSelectedItems, direction="write" + ), + "lead_time": lead_time, + "project": project, + "result": result, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AnnotationsCreateBulkResponseItem], + parse_obj_as( + type_=typing.List[AnnotationsCreateBulkResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/annotations/types/__init__.py b/src/label_studio_sdk/annotations/types/__init__.py index 0ec046f76..4bacc3ec7 100644 --- a/src/label_studio_sdk/annotations/types/__init__.py +++ b/src/label_studio_sdk/annotations/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .annotations_create_bulk_request_selected_items import AnnotationsCreateBulkRequestSelectedItems from .annotations_create_bulk_response_item import AnnotationsCreateBulkResponseItem diff --git a/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py b/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py index c3209d6c3..5a1c02d68 100644 --- a/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py +++ b/src/label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -import typing_extensions import typing -from ...core.serialization import FieldMetadata + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...core.serialization import FieldMetadata class AnnotationsCreateBulkRequestSelectedItems(UniversalBaseModel): diff --git a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py index a023f4f3d..856097dc3 100644 --- a/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +++ b/src/label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AnnotationsCreateBulkResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/base_client.py b/src/label_studio_sdk/base_client.py index ade8eacb8..c31271509 100644 --- a/src/label_studio_sdk/base_client.py +++ b/src/label_studio_sdk/base_client.py @@ -1,50 +1,31 @@ # This file was auto-generated by Fern from our API Definition. -import typing -from .environment import LabelStudioEnvironment import os +import typing + import httpx +from .actions.client import ActionsClient, AsyncActionsClient +from .annotations.client import AnnotationsClient, AsyncAnnotationsClient +from .comments.client import AsyncCommentsClient, CommentsClient from .core.api_error import ApiError -from .core.client_wrapper import SyncClientWrapper -from .annotations.client import AnnotationsClient -from .users.client import UsersClient -from .actions.client import ActionsClient -from .views.client import ViewsClient -from .files.client import FilesClient -from .ml.client import MlClient -from .predictions.client import PredictionsClient -from .projects.client import ProjectsClient -from .tasks.client import TasksClient -from .import_storage.client import ImportStorageClient -from .export_storage.client import ExportStorageClient -from .webhooks.client import WebhooksClient -from .versions.client import VersionsClient -from .prompts.client import PromptsClient -from .model_providers.client import ModelProvidersClient -from .comments.client import CommentsClient -from .workspaces.client import WorkspacesClient -from .tokens.client import TokensClient -from .jwt_settings.client import JwtSettingsClient -from .core.client_wrapper import AsyncClientWrapper -from .annotations.client import AsyncAnnotationsClient -from .users.client import AsyncUsersClient -from 
.actions.client import AsyncActionsClient -from .views.client import AsyncViewsClient -from .files.client import AsyncFilesClient -from .ml.client import AsyncMlClient -from .predictions.client import AsyncPredictionsClient -from .projects.client import AsyncProjectsClient -from .tasks.client import AsyncTasksClient -from .import_storage.client import AsyncImportStorageClient -from .export_storage.client import AsyncExportStorageClient -from .webhooks.client import AsyncWebhooksClient -from .versions.client import AsyncVersionsClient -from .prompts.client import AsyncPromptsClient -from .model_providers.client import AsyncModelProvidersClient -from .comments.client import AsyncCommentsClient -from .workspaces.client import AsyncWorkspacesClient -from .tokens.client import AsyncTokensClient -from .jwt_settings.client import AsyncJwtSettingsClient +from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .environment import LabelStudioEnvironment +from .export_storage.client import AsyncExportStorageClient, ExportStorageClient +from .files.client import AsyncFilesClient, FilesClient +from .import_storage.client import AsyncImportStorageClient, ImportStorageClient +from .jwt_settings.client import AsyncJwtSettingsClient, JwtSettingsClient +from .ml.client import AsyncMlClient, MlClient +from .model_providers.client import AsyncModelProvidersClient, ModelProvidersClient +from .predictions.client import AsyncPredictionsClient, PredictionsClient +from .projects.client import AsyncProjectsClient, ProjectsClient +from .prompts.client import AsyncPromptsClient, PromptsClient +from .tasks.client import AsyncTasksClient, TasksClient +from .tokens.client import AsyncTokensClient, TokensClient +from .users.client import AsyncUsersClient, UsersClient +from .versions.client import AsyncVersionsClient, VersionsClient +from .views.client import AsyncViewsClient, ViewsClient +from .webhooks.client import AsyncWebhooksClient, WebhooksClient +from 
.workspaces.client import AsyncWorkspacesClient, WorkspacesClient class LabelStudioBase: @@ -66,6 +47,9 @@ class LabelStudioBase: api_key : typing.Optional[str] + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + timeout : typing.Optional[float] The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. @@ -90,6 +74,7 @@ def __init__( base_url: typing.Optional[str] = None, environment: LabelStudioEnvironment = LabelStudioEnvironment.DEFAULT, api_key: typing.Optional[str] = os.getenv("LABEL_STUDIO_API_KEY"), + headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.Client] = None, @@ -104,6 +89,7 @@ def __init__( self._client_wrapper = SyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), api_key=api_key, + headers=headers, httpx_client=httpx_client if httpx_client is not None else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects) @@ -151,6 +137,9 @@ class AsyncLabelStudioBase: api_key : typing.Optional[str] + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + timeout : typing.Optional[float] The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. 
@@ -175,6 +164,7 @@ def __init__( base_url: typing.Optional[str] = None, environment: LabelStudioEnvironment = LabelStudioEnvironment.DEFAULT, api_key: typing.Optional[str] = os.getenv("LABEL_STUDIO_API_KEY"), + headers: typing.Optional[typing.Dict[str, str]] = None, timeout: typing.Optional[float] = None, follow_redirects: typing.Optional[bool] = True, httpx_client: typing.Optional[httpx.AsyncClient] = None, @@ -189,6 +179,7 @@ def __init__( self._client_wrapper = AsyncClientWrapper( base_url=_get_base_url(base_url=base_url, environment=environment), api_key=api_key, + headers=headers, httpx_client=httpx_client if httpx_client is not None else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects) diff --git a/src/label_studio_sdk/comments/__init__.py b/src/label_studio_sdk/comments/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/comments/__init__.py +++ b/src/label_studio_sdk/comments/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/comments/client.py b/src/label_studio_sdk/comments/client.py index dec7e7a72..2a30a7ef3 100644 --- a/src/label_studio_sdk/comments/client.py +++ b/src/label_studio_sdk/comments/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.comment import Comment -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawCommentsClient, RawCommentsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -16,7 +13,18 @@ class CommentsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawCommentsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawCommentsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawCommentsClient + """ + return self._raw_client def list( self, @@ -58,29 +66,10 @@ def list( ) client.comments.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/comments/", - method="GET", - params={ - "project": project, - "expand_created_by": expand_created_by, - "annotation": annotation, - }, - request_options=request_options, + _response = self._raw_client.list( + project=project, expand_created_by=expand_created_by, annotation=annotation, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Comment], - parse_obj_as( - type_=typing.List[Comment], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create( self, @@ -122,34 +111,10 @@ def create( ) client.comments.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/comments/", - method="POST", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + annotation=annotation, project=project, text=text, is_resolved=is_resolved, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - 
object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Comment: """ @@ -180,24 +145,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -227,18 +176,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -286,39 +225,31 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", 
- method="PATCH", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + annotation=annotation, + project=project, + text=text, + is_resolved=is_resolved, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncCommentsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawCommentsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawCommentsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawCommentsClient + """ + return self._raw_client async def list( self, @@ -368,29 +299,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/comments/", - method="GET", - params={ - "project": project, - "expand_created_by": expand_created_by, - "annotation": annotation, - }, - request_options=request_options, + _response = await self._raw_client.list( + project=project, expand_created_by=expand_created_by, annotation=annotation, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Comment], - parse_obj_as( - type_=typing.List[Comment], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ -440,34 +352,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/comments/", - method="POST", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + annotation=annotation, project=project, text=text, is_resolved=is_resolved, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None) -> Comment: """ @@ -506,24 +394,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -561,18 +433,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -628,31 +490,12 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/comments/{jsonable_encoder(id)}", - method="PATCH", - json={ - "annotation": annotation, - "project": project, - "text": text, - "is_resolved": is_resolved, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + annotation=annotation, + project=project, + text=text, + 
is_resolved=is_resolved, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Comment, - parse_obj_as( - type_=Comment, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/comments/raw_client.py b/src/label_studio_sdk/comments/raw_client.py new file mode 100644 index 000000000..c60535a93 --- /dev/null +++ b/src/label_studio_sdk/comments/raw_client.py @@ -0,0 +1,529 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.comment import Comment + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawCommentsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + project: typing.Optional[int] = None, + expand_created_by: typing.Optional[bool] = None, + annotation: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[Comment]]: + """ + + Get a list of comments for a specific project. 
+ + Parameters + ---------- + project : typing.Optional[int] + Project ID + + expand_created_by : typing.Optional[bool] + Expand the created_by field with object instead of ID + + annotation : typing.Optional[int] + Annotation ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Comment]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/comments/", + method="GET", + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Comment]: + """ + + Create a new comment. + + Parameters + ---------- + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Comment] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/comments/", + method="POST", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Comment]: + """ + + Get a specific comment. + + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Comment] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific comment. 
+ + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Comment]: + """ + + Update a specific comment. + + Parameters + ---------- + id : int + Comment ID + + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Comment] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="PATCH", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawCommentsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + project: typing.Optional[int] = None, + expand_created_by: typing.Optional[bool] = None, + annotation: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[Comment]]: + """ + + Get a list of comments for a specific project. + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + expand_created_by : typing.Optional[bool] + Expand the created_by field with object instead of ID + + annotation : typing.Optional[int] + Annotation ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Comment]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/comments/", + method="GET", + params={ + "project": project, + "expand_created_by": expand_created_by, + "annotation": annotation, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Comment], + parse_obj_as( + type_=typing.List[Comment], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Comment]: + """ + + Create a new comment. + + Parameters + ---------- + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Comment] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/comments/", + method="POST", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Comment]: + """ + + Get a specific comment. + + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Comment] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific comment. + + Parameters + ---------- + id : int + Comment ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + annotation: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + text: typing.Optional[str] = OMIT, + is_resolved: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Comment]: + """ + + Update a specific comment. 
+ + Parameters + ---------- + id : int + Comment ID + + annotation : typing.Optional[int] + + project : typing.Optional[int] + + text : typing.Optional[str] + + is_resolved : typing.Optional[bool] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Comment] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/comments/{jsonable_encoder(id)}", + method="PATCH", + json={ + "annotation": annotation, + "project": project, + "text": text, + "is_resolved": is_resolved, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Comment, + parse_obj_as( + type_=Comment, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/core/__init__.py b/src/label_studio_sdk/core/__init__.py index 42031ad0b..d1461de7c 100644 --- a/src/label_studio_sdk/core/__init__.py +++ b/src/label_studio_sdk/core/__init__.py @@ -1,10 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .api_error import ApiError from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper from .datetime_utils import serialize_datetime from .file import File, convert_file_dict_to_httpx_tuples, with_content_type from .http_client import AsyncHttpClient, HttpClient +from .http_response import AsyncHttpResponse, HttpResponse from .jsonable_encoder import jsonable_encoder from .pagination import AsyncPager, SyncPager from .pydantic_utilities import ( @@ -25,11 +28,13 @@ "ApiError", "AsyncClientWrapper", "AsyncHttpClient", + "AsyncHttpResponse", "AsyncPager", "BaseClientWrapper", "FieldMetadata", "File", "HttpClient", + "HttpResponse", "IS_PYDANTIC_V2", "RequestOptions", "SyncClientWrapper", diff --git a/src/label_studio_sdk/core/api_error.py b/src/label_studio_sdk/core/api_error.py index 2e9fc5431..6f850a60c 100644 --- a/src/label_studio_sdk/core/api_error.py +++ b/src/label_studio_sdk/core/api_error.py @@ -1,15 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing +from typing import Any, Dict, Optional class ApiError(Exception): - status_code: typing.Optional[int] - body: typing.Any + headers: Optional[Dict[str, str]] + status_code: Optional[int] + body: Any - def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None): + def __init__( + self, + *, + headers: Optional[Dict[str, str]] = None, + status_code: Optional[int] = None, + body: Any = None, + ) -> None: + self.headers = headers self.status_code = status_code self.body = body def __str__(self) -> str: - return f"status_code: {self.status_code}, body: {self.body}" + return f"headers: {self.headers}, status_code: {self.status_code}, body: {self.body}" diff --git a/src/label_studio_sdk/core/file.py b/src/label_studio_sdk/core/file.py index c6d11fc70..44b0d27c0 100644 --- a/src/label_studio_sdk/core/file.py +++ b/src/label_studio_sdk/core/file.py @@ -58,8 +58,7 @@ def with_content_type(*, file: File, default_content_type: str) -> File: return (filename, content, out_content_type) elif len(file) == 4: filename, content, file_content_type, headers = cast( # type: ignore - Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], - file, + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file ) out_content_type = file_content_type or default_content_type return (filename, content, out_content_type, headers) diff --git a/src/label_studio_sdk/core/force_multipart.py b/src/label_studio_sdk/core/force_multipart.py new file mode 100644 index 000000000..ae24ccff1 --- /dev/null +++ b/src/label_studio_sdk/core/force_multipart.py @@ -0,0 +1,16 @@ +# This file was auto-generated by Fern from our API Definition. + + +class ForceMultipartDict(dict): + """ + A dictionary subclass that always evaluates to True in boolean contexts. + + This is used to force multipart/form-data encoding in HTTP requests even when + the dictionary is empty, which would normally evaluate to False. 
+ """ + + def __bool__(self): + return True + + +FORCE_MULTIPART = ForceMultipartDict() diff --git a/src/label_studio_sdk/core/http_client.py b/src/label_studio_sdk/core/http_client.py index 275a54cc8..e4173f990 100644 --- a/src/label_studio_sdk/core/http_client.py +++ b/src/label_studio_sdk/core/http_client.py @@ -2,7 +2,6 @@ import asyncio import email.utils -import json import re import time import typing @@ -11,12 +10,13 @@ from random import random import httpx - from .file import File, convert_file_dict_to_httpx_tuples +from .force_multipart import FORCE_MULTIPART from .jsonable_encoder import jsonable_encoder from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions +from httpx._types import RequestFiles INITIAL_RETRY_DELAY_SECONDS = 0.5 MAX_RETRY_DELAY_SECONDS = 10 @@ -180,11 +180,17 @@ def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -195,6 +201,15 @@ def request( json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is 
not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + response = self.httpx_client.request( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -227,11 +242,7 @@ def request( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit) - else None - ), + files=request_files, timeout=timeout, ) @@ -266,11 +277,17 @@ def stream( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.Iterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -279,6 +296,15 @@ def stream( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) with self.httpx_client.stream( @@ -313,11 +339,7 @@ def stream( json=json_body, 
data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if (files is not None and files is not omit) - else None - ), + files=request_files, timeout=timeout, ) as stream: yield stream @@ -356,11 +378,17 @@ async def request( json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> httpx.Response: base_url = self.get_base_url(base_url) timeout = ( @@ -369,6 +397,15 @@ async def request( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) # Add the input to each of these and do None-safety checks @@ -404,11 +441,7 @@ async def request( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if files is not None - else None - ), + files=request_files, timeout=timeout, ) @@ -442,11 +475,17 @@ async def stream( json: 
typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 2, omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, ) -> typing.AsyncIterator[httpx.Response]: base_url = self.get_base_url(base_url) timeout = ( @@ -455,6 +494,15 @@ async def stream( else self.base_timeout() ) + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) async with self.httpx_client.stream( @@ -489,11 +537,7 @@ async def stream( json=json_body, data=data_body, content=content, - files=( - convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) - if files is not None - else None - ), + files=request_files, timeout=timeout, ) as stream: yield stream diff --git a/src/label_studio_sdk/core/http_response.py b/src/label_studio_sdk/core/http_response.py new file mode 100644 index 000000000..48a1798a5 --- /dev/null +++ b/src/label_studio_sdk/core/http_response.py @@ -0,0 +1,55 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from typing import Dict, Generic, TypeVar + +import httpx + +T = TypeVar("T") +"""Generic to represent the underlying type of the data wrapped by the HTTP response.""" + + +class BaseHttpResponse: + """Minimalist HTTP response wrapper that exposes response headers.""" + + _response: httpx.Response + + def __init__(self, response: httpx.Response): + self._response = response + + @property + def headers(self) -> Dict[str, str]: + return dict(self._response.headers) + + +class HttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + def close(self) -> None: + self._response.close() + + +class AsyncHttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + async def close(self) -> None: + await self._response.aclose() diff --git a/src/label_studio_sdk/core/jsonable_encoder.py b/src/label_studio_sdk/core/jsonable_encoder.py index 1b631e901..afee3662d 100644 --- a/src/label_studio_sdk/core/jsonable_encoder.py +++ b/src/label_studio_sdk/core/jsonable_encoder.py @@ -17,7 +17,6 @@ from typing import Any, Callable, Dict, List, Optional, Set, Union import pydantic - from .datetime_utils import serialize_datetime from .pydantic_utilities import ( IS_PYDANTIC_V2, diff --git a/src/label_studio_sdk/core/pagination.py b/src/label_studio_sdk/core/pagination.py index 74f8ae61c..209a1ff14 100644 --- a/src/label_studio_sdk/core/pagination.py +++ b/src/label_studio_sdk/core/pagination.py @@ -1,13 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import typing +from __future__ import annotations -from typing_extensions import Self +from dataclasses import dataclass +from typing import AsyncIterator, Awaitable, Callable, Generic, Iterator, List, Optional, TypeVar -import pydantic +from .http_response import BaseHttpResponse -# Generic to represent the underlying type of the results within a page -T = typing.TypeVar("T") +T = TypeVar("T") +"""Generic to represent the underlying type of the results within a page""" # SDKs implement a Page ABC per-pagination request, the endpoint then returns a pager that wraps this type @@ -19,70 +20,63 @@ # # This should be the outer function that returns the SyncPager again # get_next=lambda: list(..., cursor: response.cursor) (or list(..., offset: offset + 1)) # ) -class BasePage(pydantic.BaseModel, typing.Generic[T]): - has_next: bool - items: typing.Optional[typing.List[T]] - - -class SyncPage(BasePage[T], typing.Generic[T]): - get_next: typing.Optional[typing.Callable[[], typing.Optional[Self]]] - - -class AsyncPage(BasePage[T], typing.Generic[T]): - get_next: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Optional[Self]]]] -# ---------------------------- - +@dataclass(frozen=True) +class SyncPager(Generic[T]): + get_next: Optional[Callable[[], Optional[SyncPager[T]]]] + has_next: bool + items: Optional[List[T]] + response: Optional[BaseHttpResponse] -class SyncPager(SyncPage[T], typing.Generic[T]): # Here we type ignore the iterator to avoid a mypy error # caused by the type conflict with Pydanitc's __iter__ method # brought in by extending the base model - def __iter__(self) -> typing.Iterator[T]: # type: ignore + def __iter__(self) -> Iterator[T]: # type: ignore[override] for page in self.iter_pages(): if page.items is not None: - for item in page.items: - yield item + yield from page.items - def iter_pages(self) -> typing.Iterator[SyncPage[T]]: - page: typing.Union[SyncPager[T], None] = self - while True: - if page is not None: - yield page - if 
page.has_next and page.get_next is not None: - page = page.get_next() - if page is None or page.items is None or len(page.items) == 0: - return - else: - return - else: + def iter_pages(self) -> Iterator[SyncPager[T]]: + page: Optional[SyncPager[T]] = self + while page is not None: + yield page + + if not page.has_next or page.get_next is None: + return + + page = page.get_next() + if page is None or page.items is None or len(page.items) == 0: return - def next_page(self) -> typing.Optional[SyncPage[T]]: + def next_page(self) -> Optional[SyncPager[T]]: return self.get_next() if self.get_next is not None else None -class AsyncPager(AsyncPage[T], typing.Generic[T]): - async def __aiter__(self) -> typing.AsyncIterator[T]: # type: ignore +@dataclass(frozen=True) +class AsyncPager(Generic[T]): + get_next: Optional[Callable[[], Awaitable[Optional[AsyncPager[T]]]]] + has_next: bool + items: Optional[List[T]] + response: Optional[BaseHttpResponse] + + async def __aiter__(self) -> AsyncIterator[T]: async for page in self.iter_pages(): if page.items is not None: for item in page.items: yield item - async def iter_pages(self) -> typing.AsyncIterator[AsyncPage[T]]: - page: typing.Union[AsyncPager[T], None] = self - while True: - if page is not None: - yield page - if page is not None and page.has_next and page.get_next is not None: - page = await page.get_next() - if page is None or page.items is None or len(page.items) == 0: - return - else: - return - else: + async def iter_pages(self) -> AsyncIterator[AsyncPager[T]]: + page: Optional[AsyncPager[T]] = self + while page is not None: + yield page + + if not page.has_next or page.get_next is None: + return + + page = await page.get_next() + if page is None or page.items is None or len(page.items) == 0: return - async def next_page(self) -> typing.Optional[AsyncPage[T]]: + async def next_page(self) -> Optional[AsyncPager[T]]: return await self.get_next() if self.get_next is not None else None diff --git 
a/src/label_studio_sdk/core/pydantic_utilities.py b/src/label_studio_sdk/core/pydantic_utilities.py index 93a9d7867..7db29500a 100644 --- a/src/label_studio_sdk/core/pydantic_utilities.py +++ b/src/label_studio_sdk/core/pydantic_utilities.py @@ -2,90 +2,66 @@ # nopycln: file import datetime as dt -import typing from collections import defaultdict - -import typing_extensions +from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast import pydantic -from .datetime_utils import serialize_datetime -from .serialization import convert_and_respect_annotation_metadata - IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") if IS_PYDANTIC_V2: - # isort will try to reformat the comments on these imports, which breaks mypy - # isort: off - from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - parse_date as parse_date, - ) - from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - parse_datetime as parse_datetime, - ) - from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - ENCODERS_BY_TYPE as encoders_by_type, - ) - from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 - get_args as get_args, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - get_origin as get_origin, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - is_literal_type as is_literal_type, - ) - from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 - is_union as is_union, - ) - from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + from pydantic.v1.datetime_parse import parse_date as parse_date + from pydantic.v1.datetime_parse import parse_datetime as parse_datetime + from 
pydantic.v1.fields import ModelField as ModelField + from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined] + from pydantic.v1.typing import get_args as get_args + from pydantic.v1.typing import get_origin as get_origin + from pydantic.v1.typing import is_literal_type as is_literal_type + from pydantic.v1.typing import is_union as is_union else: - from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 - from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 - from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 - from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 - from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 - from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 - from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 - from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 - - # isort: on + from pydantic.datetime_parse import parse_date as parse_date # type: ignore[no-redef] + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore[no-redef] + from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined, no-redef] + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[no-redef] + from pydantic.typing import get_args as get_args # type: ignore[no-redef] + from pydantic.typing import get_origin as get_origin # type: ignore[no-redef] + from pydantic.typing import is_literal_type as is_literal_type # type: ignore[no-redef] + from pydantic.typing import is_union as is_union # type: ignore[no-redef] +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata +from typing_extensions import TypeAlias -T = typing.TypeVar("T") -Model = 
typing.TypeVar("Model", bound=pydantic.BaseModel) +T = TypeVar("T") +Model = TypeVar("Model", bound=pydantic.BaseModel) -def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: +def parse_obj_as(type_: Type[T], object_: Any) -> T: dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read") if IS_PYDANTIC_V2: - adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2 + adapter = pydantic.TypeAdapter(type_) # type: ignore[attr-defined] return adapter.validate_python(dealiased_object) - else: - return pydantic.parse_obj_as(type_, dealiased_object) + return pydantic.parse_obj_as(type_, dealiased_object) -def to_jsonable_with_fallback( - obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any] -) -> typing.Any: +def to_jsonable_with_fallback(obj: Any, fallback_serializer: Callable[[Any], Any]) -> Any: if IS_PYDANTIC_V2: from pydantic_core import to_jsonable_python return to_jsonable_python(obj, fallback=fallback_serializer) - else: - return fallback_serializer(obj) + return fallback_serializer(obj) class UniversalBaseModel(pydantic.BaseModel): if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( + model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( # type: ignore[typeddict-unknown-key] # Allow fields beginning with `model_` to be used in the model protected_namespaces=(), - ) # type: ignore # Pydantic v2 + ) - @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore # Pydantic v2 - def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any: # type: ignore # Pydantic v2 - serialized = handler(self) + @pydantic.model_serializer(mode="plain", when_used="json") # type: ignore[attr-defined] + def serialize_model(self) -> Any: # type: ignore[name-defined] + serialized = self.model_dump() data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in 
serialized.items()} return data @@ -96,38 +72,28 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} @classmethod - def model_construct( - cls: typing.Type["Model"], - _fields_set: typing.Optional[typing.Set[str]] = None, - **values: typing.Any, - ) -> "Model": + def model_construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") return cls.construct(_fields_set, **dealiased_object) @classmethod - def construct( - cls: typing.Type["Model"], - _fields_set: typing.Optional[typing.Set[str]] = None, - **values: typing.Any, - ) -> "Model": + def construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") if IS_PYDANTIC_V2: - return super().model_construct(_fields_set, **dealiased_object) # type: ignore # Pydantic v2 - else: - return super().construct(_fields_set, **dealiased_object) + return super().model_construct(_fields_set, **dealiased_object) # type: ignore[misc] + return super().construct(_fields_set, **dealiased_object) - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { + def json(self, **kwargs: Any) -> str: + kwargs_with_defaults = { "by_alias": True, "exclude_unset": True, **kwargs, } if IS_PYDANTIC_V2: - return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2 - else: - return super().json(**kwargs_with_defaults) + return super().model_dump_json(**kwargs_with_defaults) # type: ignore[misc] + return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + def dict(self, **kwargs: Any) -> Dict[str, Any]: """ Override the default dict method to `exclude_unset` by default. 
This function patches `exclude_unset` to work include fields within non-None default values. @@ -138,21 +104,21 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models # that we have less control over, and this is less intrusive than custom serializers for now. if IS_PYDANTIC_V2: - kwargs_with_defaults_exclude_unset: typing.Any = { + kwargs_with_defaults_exclude_unset = { **kwargs, "by_alias": True, "exclude_unset": True, "exclude_none": False, } - kwargs_with_defaults_exclude_none: typing.Any = { + kwargs_with_defaults_exclude_none = { **kwargs, "by_alias": True, "exclude_none": True, "exclude_unset": False, } dict_dump = deep_union_pydantic_dicts( - super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2 - super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2 + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore[misc] + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore[misc] ) else: @@ -172,7 +138,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: if default is not None: self.__fields_set__.add(name) - kwargs_with_defaults_exclude_unset_include_fields: typing.Any = { + kwargs_with_defaults_exclude_unset_include_fields = { "by_alias": True, "exclude_unset": True, "include": _fields_set, @@ -184,12 +150,10 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write") -def _union_list_of_pydantic_dicts( - source: typing.List[typing.Any], destination: typing.List[typing.Any] -) -> typing.List[typing.Any]: - converted_list: typing.List[typing.Any] = [] +def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]: + converted_list: List[Any] = [] for i, item in enumerate(source): - 
destination_value = destination[i] # type: ignore + destination_value = destination[i] if isinstance(item, dict): converted_list.append(deep_union_pydantic_dicts(item, destination_value)) elif isinstance(item, list): @@ -199,9 +163,7 @@ def _union_list_of_pydantic_dicts( return converted_list -def deep_union_pydantic_dicts( - source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any] -) -> typing.Dict[str, typing.Any]: +def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any]) -> Dict[str, Any]: for key, value in source.items(): node = destination.setdefault(key, {}) if isinstance(value, dict): @@ -219,18 +181,16 @@ def deep_union_pydantic_dicts( if IS_PYDANTIC_V2: - class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[misc, name-defined, type-arg] pass - UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore + UniversalRootModel: TypeAlias = V2RootModel # type: ignore[misc] else: - UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore + UniversalRootModel: TypeAlias = UniversalBaseModel # type: ignore[misc, no-redef] -def encode_by_type(o: typing.Any) -> typing.Any: - encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = ( - defaultdict(tuple) - ) +def encode_by_type(o: Any) -> Any: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) for type_, encoder in encoders_by_type.items(): encoders_by_class_tuples[encoder] += (type_,) @@ -241,54 +201,49 @@ def encode_by_type(o: typing.Any) -> typing.Any: return encoder(o) -def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: +def update_forward_refs(model: Type["Model"], **localns: Any) -> None: if IS_PYDANTIC_V2: - model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2 + 
model.model_rebuild(raise_errors=False) # type: ignore[attr-defined] else: model.update_forward_refs(**localns) # Mirrors Pydantic's internal typing -AnyCallable = typing.Callable[..., typing.Any] +AnyCallable = Callable[..., Any] def universal_root_validator( pre: bool = False, -) -> typing.Callable[[AnyCallable], AnyCallable]: +) -> Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 - else: - return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 + return cast(AnyCallable, pydantic.model_validator(mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.root_validator(pre=pre)(func)) # type: ignore[call-overload] return decorator -def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: +def universal_field_validator(field_name: str, pre: bool = False) -> Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 - else: - return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1 + return cast(AnyCallable, pydantic.field_validator(field_name, mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func)) return decorator -PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo] +PydanticField = Union[ModelField, pydantic.fields.FieldInfo] -def _get_model_fields( - model: typing.Type["Model"], -) -> typing.Mapping[str, PydanticField]: +def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]: if IS_PYDANTIC_V2: - return model.model_fields # type: ignore # Pydantic v2 - else: - return 
model.__fields__ # type: ignore # Pydantic v1 + return cast(Mapping[str, PydanticField], model.model_fields) # type: ignore[attr-defined] + return cast(Mapping[str, PydanticField], model.__fields__) -def _get_field_default(field: PydanticField) -> typing.Any: +def _get_field_default(field: PydanticField) -> Any: try: - value = field.get_default() # type: ignore # Pydantic < v1.10.15 + value = field.get_default() # type: ignore[union-attr] except: value = field.default if IS_PYDANTIC_V2: diff --git a/src/label_studio_sdk/core/serialization.py b/src/label_studio_sdk/core/serialization.py index cb5dcbf93..c36e865cc 100644 --- a/src/label_studio_sdk/core/serialization.py +++ b/src/label_studio_sdk/core/serialization.py @@ -4,9 +4,8 @@ import inspect import typing -import typing_extensions - import pydantic +import typing_extensions class FieldMetadata: @@ -161,7 +160,12 @@ def _convert_mapping( direction: typing.Literal["read", "write"], ) -> typing.Mapping[str, object]: converted_object: typing.Dict[str, object] = {} - annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + try: + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + except NameError: + # The TypedDict contains a circular reference, so + # we use the __annotations__ attribute directly. + annotations = getattr(expected_type, "__annotations__", {}) aliases_to_field_names = _get_alias_to_field_name(annotations) for key, value in object_.items(): if direction == "read" and key in aliases_to_field_names: diff --git a/src/label_studio_sdk/errors/__init__.py b/src/label_studio_sdk/errors/__init__.py index 076c429b4..27c365534 100644 --- a/src/label_studio_sdk/errors/__init__.py +++ b/src/label_studio_sdk/errors/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .bad_request_error import BadRequestError from .internal_server_error import InternalServerError from .not_found_error import NotFoundError diff --git a/src/label_studio_sdk/errors/bad_request_error.py b/src/label_studio_sdk/errors/bad_request_error.py index 9c13c61f9..baf5be4f7 100644 --- a/src/label_studio_sdk/errors/bad_request_error.py +++ b/src/label_studio_sdk/errors/bad_request_error.py @@ -1,9 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class BadRequestError(ApiError): - def __init__(self, body: typing.Optional[typing.Any]): - super().__init__(status_code=400, body=body) + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=400, headers=headers, body=body) diff --git a/src/label_studio_sdk/errors/internal_server_error.py b/src/label_studio_sdk/errors/internal_server_error.py index b4d235549..2c9be920b 100644 --- a/src/label_studio_sdk/errors/internal_server_error.py +++ b/src/label_studio_sdk/errors/internal_server_error.py @@ -1,8 +1,10 @@ # This file was auto-generated by Fern from our API Definition. +import typing + from ..core.api_error import ApiError class InternalServerError(ApiError): - def __init__(self, body: str): - super().__init__(status_code=500, body=body) + def __init__(self, body: str, headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=500, headers=headers, body=body) diff --git a/src/label_studio_sdk/errors/not_found_error.py b/src/label_studio_sdk/errors/not_found_error.py index a1235b87f..dcd60e383 100644 --- a/src/label_studio_sdk/errors/not_found_error.py +++ b/src/label_studio_sdk/errors/not_found_error.py @@ -1,9 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class NotFoundError(ApiError): - def __init__(self, body: typing.Optional[typing.Any]): - super().__init__(status_code=404, body=body) + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=404, headers=headers, body=body) diff --git a/src/label_studio_sdk/errors/unauthorized_error.py b/src/label_studio_sdk/errors/unauthorized_error.py index 1c00f98ab..c83b25c26 100644 --- a/src/label_studio_sdk/errors/unauthorized_error.py +++ b/src/label_studio_sdk/errors/unauthorized_error.py @@ -1,9 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.api_error import ApiError import typing +from ..core.api_error import ApiError + class UnauthorizedError(ApiError): - def __init__(self, body: typing.Optional[typing.Any]): - super().__init__(status_code=401, body=body) + def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + super().__init__(status_code=401, headers=headers, body=body) diff --git a/src/label_studio_sdk/export_storage/__init__.py b/src/label_studio_sdk/export_storage/__init__.py index 0203a293b..635b53022 100644 --- a/src/label_studio_sdk/export_storage/__init__.py +++ b/src/label_studio_sdk/export_storage/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ExportStorageListTypesResponseItem from . 
import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse diff --git a/src/label_studio_sdk/export_storage/azure/__init__.py b/src/label_studio_sdk/export_storage/azure/__init__.py index 323fc5f3d..97dcea344 100644 --- a/src/label_studio_sdk/export_storage/azure/__init__.py +++ b/src/label_studio_sdk/export_storage/azure/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import AzureCreateResponse, AzureUpdateResponse __all__ = ["AzureCreateResponse", "AzureUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/azure/client.py b/src/label_studio_sdk/export_storage/azure/client.py index 4ef84db7f..c7314088a 100644 --- a/src/label_studio_sdk/export_storage/azure/client.py +++ b/src/label_studio_sdk/export_storage/azure/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_export_storage import AzureBlobExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawAzureClient, RawAzureClient from .types.azure_create_response import AzureCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,13 +15,21 @@ class AzureClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawAzureClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobExportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.export_storage.azure.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobExportStorage], - parse_obj_as( - type_=typing.List[AzureBlobExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -142,38 +128,18 @@ def create( ) client.export_storage.azure.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = 
self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -238,33 +204,19 @@ def validate( ) client.export_storage.azure.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/azure/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -297,24 +249,8 @@ def get(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -346,18 +282,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -427,38 +353,19 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + 
can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -492,35 +399,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncAzureClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawAzureClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobExportStorage]: """ @@ -560,27 +459,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobExportStorage], - parse_obj_as( - type_=typing.List[AzureBlobExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -654,38 +534,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - 
return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -758,33 +618,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/azure/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -825,24 +671,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: 
ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -882,18 +712,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -971,38 +791,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - 
parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobExportStorage: """ @@ -1044,21 +845,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobExportStorage, - parse_obj_as( - type_=AzureBlobExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/azure/raw_client.py b/src/label_studio_sdk/export_storage/azure/raw_client.py new file mode 100644 index 000000000..12ef87da6 --- /dev/null +++ b/src/label_studio_sdk/export_storage/azure/raw_client.py @@ -0,0 +1,881 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.azure_blob_export_storage import AzureBlobExportStorage +from .types.azure_create_response import AzureCreateResponse +from .types.azure_update_response import AzureUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAzureClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[AzureBlobExportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[AzureBlobExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureCreateResponse]: + """ + + Create a new target storage connection to Microsoft Azure Blob storage. + + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
+ + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, 
+ account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobExportStorage]: + """ + + Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[AzureBlobExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. 
+ + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobExportStorage]: + """ + + Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external containers only go one way. 
They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureBlobExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawAzureClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[AzureBlobExportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure export (target) storage connections for a specific project. 
+ + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[AzureBlobExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobExportStorage], + parse_obj_as( + type_=typing.List[AzureBlobExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AzureCreateResponse]: + """ + + Create a new target storage connection to Microsoft Azure Blob storage. 
+ + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Azure export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AzureBlobExportStorage]: + """ + + Get a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AzureBlobExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob export storage. 
+ + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AzureBlobExportStorage]: + """ + + Sync tasks to an Azure export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
+ + Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureBlobExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobExportStorage, + parse_obj_as( + type_=AzureBlobExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/azure/types/__init__.py b/src/label_studio_sdk/export_storage/azure/types/__init__.py index 0cb2cdbbc..e56fb71c1 100644 --- a/src/label_studio_sdk/export_storage/azure/types/__init__.py +++ b/src/label_studio_sdk/export_storage/azure/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .azure_create_response import AzureCreateResponse from .azure_update_response import AzureUpdateResponse diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py index d82c30787..1f374501b 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py index e530bafb9..286118f96 100644 --- a/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/export_storage/azure/types/azure_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/client.py b/src/label_studio_sdk/export_storage/client.py index ac11dab4a..d0158fe01 100644 --- a/src/label_studio_sdk/export_storage/client.py +++ b/src/label_studio_sdk/export_storage/client.py @@ -1,38 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.client_wrapper import SyncClientWrapper -from .azure.client import AzureClient -from .gcs.client import GcsClient -from .local.client import LocalClient -from .redis.client import RedisClient -from .s3.client import S3Client -from .s3s.client import S3SClient import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from .types.export_storage_list_types_response_item import ( - ExportStorageListTypesResponseItem, -) -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper -from .azure.client import AsyncAzureClient -from .gcs.client import AsyncGcsClient -from .local.client import AsyncLocalClient -from .redis.client import AsyncRedisClient -from .s3.client import AsyncS3Client -from .s3s.client import AsyncS3SClient +from .azure.client import AsyncAzureClient, AzureClient +from .gcs.client import AsyncGcsClient, GcsClient +from .local.client import AsyncLocalClient, LocalClient +from .raw_client import AsyncRawExportStorageClient, RawExportStorageClient +from .redis.client import AsyncRedisClient, RedisClient +from .s3.client import AsyncS3Client, S3Client +from .s3s.client import AsyncS3SClient, S3SClient +from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem class ExportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AzureClient(client_wrapper=self._client_wrapper) - self.gcs = GcsClient(client_wrapper=self._client_wrapper) - self.local = LocalClient(client_wrapper=self._client_wrapper) - self.redis = RedisClient(client_wrapper=self._client_wrapper) - self.s3 = S3Client(client_wrapper=self._client_wrapper) - self.s3s = S3SClient(client_wrapper=self._client_wrapper) + self._raw_client = 
RawExportStorageClient(client_wrapper=client_wrapper) + self.azure = AzureClient(client_wrapper=client_wrapper) + + self.gcs = GcsClient(client_wrapper=client_wrapper) + + self.local = LocalClient(client_wrapper=client_wrapper) + + self.redis = RedisClient(client_wrapper=client_wrapper) + + self.s3 = S3Client(client_wrapper=client_wrapper) + + self.s3s = S3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawExportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawExportStorageClient + """ + return self._raw_client def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -59,35 +65,35 @@ def list_types( ) client.export_storage.list_types() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list_types(request_options=request_options) + return _response.data class AsyncExportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) - self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) - self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) - self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) - self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) - self.s3s = 
AsyncS3SClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawExportStorageClient(client_wrapper=client_wrapper) + self.azure = AsyncAzureClient(client_wrapper=client_wrapper) + + self.gcs = AsyncGcsClient(client_wrapper=client_wrapper) + + self.local = AsyncLocalClient(client_wrapper=client_wrapper) + + self.redis = AsyncRedisClient(client_wrapper=client_wrapper) + + self.s3 = AsyncS3Client(client_wrapper=client_wrapper) + + self.s3s = AsyncS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawExportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawExportStorageClient + """ + return self._raw_client async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -122,21 +128,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_types(request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/gcs/__init__.py b/src/label_studio_sdk/export_storage/gcs/__init__.py index 2c4b3d376..7054c2af9 100644 --- a/src/label_studio_sdk/export_storage/gcs/__init__.py +++ b/src/label_studio_sdk/export_storage/gcs/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import GcsCreateResponse, GcsUpdateResponse __all__ = ["GcsCreateResponse", "GcsUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/gcs/client.py b/src/label_studio_sdk/export_storage/gcs/client.py index d077d9d8c..e1ac120d2 100644 --- a/src/label_studio_sdk/export_storage/gcs/client.py +++ b/src/label_studio_sdk/export_storage/gcs/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_export_storage import GcsExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawGcsClient, RawGcsClient from .types.gcs_create_response import GcsCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class GcsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawGcsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawGcsClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsExportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.export_storage.gcs.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsExportStorage], - parse_obj_as( - type_=typing.List[GcsExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -142,38 +128,18 @@ def create( ) client.export_storage.gcs.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= 
_response.status_code < 300: - return typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -238,33 +204,19 @@ def validate( ) client.export_storage.gcs.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/gcs/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -297,24 +249,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= 
_response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -346,18 +282,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -427,38 +353,19 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + 
google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -492,35 +399,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncGcsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawGcsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawGcsClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsExportStorage]: """ @@ -560,27 +459,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsExportStorage], - parse_obj_as( - type_=typing.List[GcsExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -654,38 +534,18 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - 
omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -758,33 +618,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/gcs/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -825,24 +671,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= 
_response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -882,18 +712,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -971,38 +791,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + 
google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsExportStorage: """ @@ -1044,21 +845,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsExportStorage, - parse_obj_as( - type_=GcsExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/gcs/raw_client.py b/src/label_studio_sdk/export_storage/gcs/raw_client.py new file mode 100644 index 000000000..700e69127 --- /dev/null +++ b/src/label_studio_sdk/export_storage/gcs/raw_client.py @@ -0,0 +1,881 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.gcs_export_storage import GcsExportStorage +from .types.gcs_create_response import GcsCreateResponse +from .types.gcs_update_response import GcsUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawGcsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[GcsExportStorage]]: + """ + + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[GcsExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsCreateResponse]: + """ + + Create a new target storage connection to Google Cloud Storage. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + 
google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcs/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsExportStorage]: + """ + + Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[GcsExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. 
Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsExportStorage]: + """ + + Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawGcsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[GcsExportStorage]]: + """ + + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all GCS export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[GcsExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsExportStorage], + parse_obj_as( + type_=typing.List[GcsExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsCreateResponse]: + """ + + Create a new target storage connection to Google Cloud Storage. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
+ + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = 
OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific GCS export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcs/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsExportStorage]: + """ + + Get a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GcsExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. 
Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsExportStorage]: + """ + + Sync tasks to a GCS export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsExportStorage, + parse_obj_as( + type_=GcsExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/gcs/types/__init__.py b/src/label_studio_sdk/export_storage/gcs/types/__init__.py index 832c1ee1c..73d049459 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/__init__.py +++ b/src/label_studio_sdk/export_storage/gcs/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .gcs_create_response import GcsCreateResponse from .gcs_update_response import GcsUpdateResponse diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py index 955c9d0cf..f4c6c63a0 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py index 48f05d0f3..86860ea58 100644 --- a/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/export_storage/gcs/types/gcs_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/local/__init__.py b/src/label_studio_sdk/export_storage/local/__init__.py index 248109b66..44e4524e0 100644 --- a/src/label_studio_sdk/export_storage/local/__init__.py +++ b/src/label_studio_sdk/export_storage/local/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import LocalCreateResponse, LocalUpdateResponse __all__ = ["LocalCreateResponse", "LocalUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/local/client.py b/src/label_studio_sdk/export_storage/local/client.py index cf37f1c04..3201ddb45 100644 --- a/src/label_studio_sdk/export_storage/local/client.py +++ b/src/label_studio_sdk/export_storage/local/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_export_storage import LocalFilesExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawLocalClient, RawLocalClient from .types.local_create_response import LocalCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class LocalClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawLocalClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesExportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.export_storage.local.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesExportStorage], - parse_obj_as( - type_=typing.List[LocalFilesExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -134,36 +120,16 @@ def create( ) client.export_storage.local.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = 
_response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -220,31 +186,17 @@ def validate( ) client.export_storage.local.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -277,24 +229,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + 
_response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -326,18 +262,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -399,36 +325,17 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) 
-> LocalFilesExportStorage: """ @@ -462,35 +369,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncLocalClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawLocalClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesExportStorage]: """ @@ -530,27 +429,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesExportStorage], - parse_obj_as( - type_=typing.List[LocalFilesExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -616,36 +496,16 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - 
object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -710,31 +570,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesExportStorage: """ @@ -775,24 +621,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -832,18 +662,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -913,36 +733,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return 
_response.data async def sync( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -986,21 +787,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesExportStorage, - parse_obj_as( - type_=LocalFilesExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/local/raw_client.py b/src/label_studio_sdk/export_storage/local/raw_client.py new file mode 100644 index 000000000..1019d32ec --- /dev/null +++ b/src/label_studio_sdk/export_storage/local/raw_client.py @@ -0,0 +1,821 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.local_files_export_storage import LocalFilesExportStorage +from .types.local_create_response import LocalCreateResponse +from .types.local_update_response import LocalUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawLocalClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[LocalFilesExportStorage]]: + """ + + You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[LocalFilesExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + 
regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalCreateResponse]: + """ + + Create a new target storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[LocalCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. 
If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesExportStorage]: + """ + + Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalFilesExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalUpdateResponse]: + """ + + Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesExportStorage]: + """ + + Sync tasks to an local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalFilesExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawLocalClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[LocalFilesExportStorage]]: + """ + + You can connect a local file directory to Label Studio as a source storage or target storage. Use this API request to get a list of all local file export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[LocalFilesExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesExportStorage], + parse_obj_as( + type_=typing.List[LocalFilesExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalCreateResponse]: + """ + + Create a new target storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). 
+ + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific local file export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesExportStorage]: + """ + + Get a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[LocalFilesExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalUpdateResponse]: + """ + + Update a specific local file export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files export storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesExportStorage]: + """ + + Sync tasks to an local file export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external local file directories only go one way. They either create tasks from objects in the directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalFilesExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesExportStorage, + parse_obj_as( + type_=LocalFilesExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/local/types/__init__.py b/src/label_studio_sdk/export_storage/local/types/__init__.py index 9a12e8745..5f88d9245 100644 --- a/src/label_studio_sdk/export_storage/local/types/__init__.py +++ b/src/label_studio_sdk/export_storage/local/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .local_create_response import LocalCreateResponse from .local_update_response import LocalUpdateResponse diff --git a/src/label_studio_sdk/export_storage/local/types/local_create_response.py b/src/label_studio_sdk/export_storage/local/types/local_create_response.py index 95051747a..4f45cad89 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/local/types/local_update_response.py b/src/label_studio_sdk/export_storage/local/types/local_update_response.py index e5dd8df6c..885189c74 100644 --- a/src/label_studio_sdk/export_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/export_storage/local/types/local_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/raw_client.py b/src/label_studio_sdk/export_storage/raw_client.py new file mode 100644 index 000000000..d843d01dc --- /dev/null +++ b/src/label_studio_sdk/export_storage/raw_client.py @@ -0,0 +1,93 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from .types.export_storage_list_types_response_item import ExportStorageListTypesResponseItem + + +class RawExportStorageClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ExportStorageListTypesResponseItem]]: + """ + Retrieve a list of the export storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[ExportStorageListTypesResponseItem]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawExportStorageClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
AsyncHttpResponse[typing.List[ExportStorageListTypesResponseItem]]: + """ + Retrieve a list of the export storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[ExportStorageListTypesResponseItem]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ExportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/redis/__init__.py b/src/label_studio_sdk/export_storage/redis/__init__.py index 7f87f18fe..e52cb2ace 100644 --- a/src/label_studio_sdk/export_storage/redis/__init__.py +++ b/src/label_studio_sdk/export_storage/redis/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import RedisCreateResponse, RedisUpdateResponse __all__ = ["RedisCreateResponse", "RedisUpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/redis/client.py b/src/label_studio_sdk/export_storage/redis/client.py index 03c8b42e5..6a2ba5f52 100644 --- a/src/label_studio_sdk/export_storage/redis/client.py +++ b/src/label_studio_sdk/export_storage/redis/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_export_storage import RedisExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawRedisClient, RawRedisClient from .types.redis_create_response import RedisCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class RedisClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawRedisClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisExportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.export_storage.redis.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisExportStorage], - parse_obj_as( - type_=typing.List[RedisExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -146,39 +132,19 @@ def create( ) client.export_storage.redis.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="POST", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # type: 
ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -247,34 +213,20 @@ def validate( ) client.export_storage.redis.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/redis/validate", - method="POST", - json={ - "id": id, - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -307,24 +259,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -356,18 +292,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -441,39 +367,20 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -507,35 +414,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncRedisClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawRedisClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisExportStorage]: """ @@ -575,27 +474,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisExportStorage], - parse_obj_as( - type_=typing.List[RedisExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -673,39 +553,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis", - method="POST", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - parse_obj_as( - 
type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -782,34 +642,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/redis/validate", - method="POST", - json={ - "id": id, - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -850,24 +696,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -907,18 +737,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1000,39 +820,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "db": db, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + db=db, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisExportStorage: """ @@ -1074,21 +875,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisExportStorage, - parse_obj_as( - type_=RedisExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/redis/raw_client.py b/src/label_studio_sdk/export_storage/redis/raw_client.py new file mode 100644 index 000000000..707afb81e --- /dev/null +++ b/src/label_studio_sdk/export_storage/redis/raw_client.py @@ -0,0 +1,911 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.redis_export_storage import RedisExportStorage +from .types.redis_create_response import RedisCreateResponse +from .types.redis_update_response import RedisUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawRedisClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[RedisExportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[RedisExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisCreateResponse]: + """ + + Create a new target storage connection to Redis. + + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="POST", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
HttpResponse[None]: + """ + + Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/redis/validate", + method="POST", + json={ + "id": id, + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisExportStorage]: + """ + + Get a specific Redis export 
storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisExportStorage]: + """ + + Sync tasks to an Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawRedisClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[RedisExportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
+ + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[RedisExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisExportStorage], + parse_obj_as( + type_=typing.List[RedisExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisCreateResponse]: + """ + + Create a new target storage connection to Redis. + + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis", + method="POST", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + 
project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Redis export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/redis/validate", + method="POST", + json={ + "id": id, + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisExportStorage]: + """ + + Get a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[RedisExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + db: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis export storage. + + db : typing.Optional[int] + Database ID of database to use + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "db": db, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisExportStorage]: + """ + + Sync tasks to an Redis export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisExportStorage, + parse_obj_as( + type_=RedisExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/redis/types/__init__.py b/src/label_studio_sdk/export_storage/redis/types/__init__.py index b3557bc08..aea7ed291 100644 --- a/src/label_studio_sdk/export_storage/redis/types/__init__.py +++ b/src/label_studio_sdk/export_storage/redis/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .redis_create_response import RedisCreateResponse from .redis_update_response import RedisUpdateResponse diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py index 7aab4a479..c57c0ace4 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py index 8eec3c821..7787c488a 100644 --- a/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/export_storage/redis/types/redis_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3/__init__.py b/src/label_studio_sdk/export_storage/s3/__init__.py index c749fe227..e6421caaf 100644 --- a/src/label_studio_sdk/export_storage/s3/__init__.py +++ b/src/label_studio_sdk/export_storage/s3/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import S3CreateResponse, S3UpdateResponse __all__ = ["S3CreateResponse", "S3UpdateResponse"] diff --git a/src/label_studio_sdk/export_storage/s3/client.py b/src/label_studio_sdk/export_storage/s3/client.py index 9b98d193a..d32dc2b76 100644 --- a/src/label_studio_sdk/export_storage/s3/client.py +++ b/src/label_studio_sdk/export_storage/s3/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3export_storage import S3ExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawS3Client, RawS3Client from .types.s3create_response import S3CreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class S3Client: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawS3Client + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ExportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.export_storage.s3.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ExportStorage], - parse_obj_as( - type_=typing.List[S3ExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -158,42 +144,22 @@ def create( ) client.export_storage.s3.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + 
aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -274,37 +240,23 @@ def validate( ) client.export_storage.s3.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -337,24 +289,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -386,18 +322,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -483,42 +409,23 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - 
"project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -552,35 +459,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncS3Client: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawS3Client + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ExportStorage]: """ @@ -620,27 +519,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ExportStorage], - parse_obj_as( - type_=typing.List[S3ExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -730,42 +610,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - 
"aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -854,37 +714,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3/validate", - method="POST", - json={ - "id": id, - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + 
aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -925,24 +771,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -982,18 +812,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1087,42 +907,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ExportStorage: """ @@ -1164,21 +965,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3/{jsonable_encoder(id)}/sync", - 
method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ExportStorage, - parse_obj_as( - type_=S3ExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/export_storage/s3/raw_client.py b/src/label_studio_sdk/export_storage/s3/raw_client.py new file mode 100644 index 000000000..ed6014980 --- /dev/null +++ b/src/label_studio_sdk/export_storage/s3/raw_client.py @@ -0,0 +1,999 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.s3export_storage import S3ExportStorage +from .types.s3create_response import S3CreateResponse +from .types.s3update_response import S3UpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawS3Client: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[S3ExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. 
Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[S3ExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
HttpResponse[S3CreateResponse]: + """ + + Create a new target storage connection to S3 storage. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3CreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 export storage connection. 
This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[S3ExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3ExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3UpdateResponse]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3UpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3ExportStorage]: + """ + + Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3ExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3Client: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3ExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. 
Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[S3ExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ExportStorage], + parse_obj_as( + type_=typing.List[S3ExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3CreateResponse]: + """ + + Create a new target storage connection to S3 storage. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be exported until you [sync your connection](sync). + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3CreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 export storage connection. 
This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3/validate", + method="POST", + json={ + "id": id, + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3ExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Deleting an export/target storage connection does not affect tasks with synced data in Label Studio. If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3UpdateResponse]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 export storage. + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3UpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ExportStorage]: + """ + + Sync tasks to an S3 export/target storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3ExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ExportStorage, + parse_obj_as( + type_=S3ExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/s3/types/__init__.py b/src/label_studio_sdk/export_storage/s3/types/__init__.py index 3cc20ce7b..bb333983a 100644 --- a/src/label_studio_sdk/export_storage/s3/types/__init__.py +++ b/src/label_studio_sdk/export_storage/s3/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .s3create_response import S3CreateResponse from .s3update_response import S3UpdateResponse diff --git a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py index cc74a5583..ff883c191 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3create_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3CreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py index 466c2b64c..c3ee36914 100644 --- a/src/label_studio_sdk/export_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/export_storage/s3/types/s3update_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3UpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/export_storage/s3s/__init__.py b/src/label_studio_sdk/export_storage/s3s/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/export_storage/s3s/__init__.py +++ b/src/label_studio_sdk/export_storage/s3s/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + diff --git a/src/label_studio_sdk/export_storage/s3s/client.py b/src/label_studio_sdk/export_storage/s3s/client.py index eba7a105f..6dde0d936 100644 --- a/src/label_studio_sdk/export_storage/s3s/client.py +++ b/src/label_studio_sdk/export_storage/s3s/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3s_export_storage import S3SExportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...core.jsonable_encoder import jsonable_encoder -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawS3SClient, RawS3SClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,13 +13,21 @@ class S3SClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawS3SClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SExportStorage]: """ @@ -54,27 +59,8 @@ def list( ) client.export_storage.s3s.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SExportStorage], - parse_obj_as( - type_=typing.List[S3SExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -146,40 +132,20 @@ def create( ) client.export_storage.s3s.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= 
_response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ @@ -210,24 +176,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -257,18 +207,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + 
return _response.data def update( self, @@ -344,40 +284,21 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -446,45 +367,39 @@ def validate( ) client.export_storage.s3s.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/export/s3s/validate", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + 
bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncS3SClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawS3SClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SExportStorage]: """ @@ -524,27 +439,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SExportStorage], - parse_obj_as( - type_=typing.List[S3SExportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -624,40 +520,20 @@ async 
def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SExportStorage: """ @@ -696,24 +572,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = 
await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -751,18 +611,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -846,40 +696,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/export/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SExportStorage, - parse_obj_as( - type_=S3SExportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -956,31 +787,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/export/s3s/validate", - method="POST", - json={ - "can_delete_objects": can_delete_objects, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + can_delete_objects=can_delete_objects, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/export_storage/s3s/raw_client.py b/src/label_studio_sdk/export_storage/s3s/raw_client.py new file mode 100644 index 000000000..c230c2660 --- /dev/null +++ b/src/label_studio_sdk/export_storage/s3s/raw_client.py @@ -0,0 +1,827 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.s3s_export_storage import S3SExportStorage + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawS3SClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[S3SExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[S3SExportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SExportStorage]: + """ + + Create a new target storage connection to a S3 bucket with IAM role access. + + For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3SExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
+ + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SExportStorage]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SExportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: 
typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3SClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3SExportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 export (target) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[S3SExportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SExportStorage], + parse_obj_as( + type_=typing.List[S3SExportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SExportStorage]: + """ + + Create a new target storage connection to a S3 bucket with IAM role access. + + For information about the required fields and prerequisites, see [Amazon S3](https://docs.humansignal.com/guide/storage#Set-up-an-S3-connection-with-IAM-role-access) in the Label Studio documentation. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3SExportStorage]: + """ + + Get a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). 
+ + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SExportStorage]: + """ + + Update a specific S3 export storage connection. You will need to provide the export storage ID. You can find this using [List export storages](list). + + Parameters + ---------- + id : int + Export storage ID + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SExportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SExportStorage, + parse_obj_as( + type_=S3SExportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + can_delete_objects: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = 
OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 export storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to export data. + + Parameters + ---------- + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/s3s/validate", + method="POST", + json={ + "can_delete_objects": can_delete_objects, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/export_storage/types/__init__.py b/src/label_studio_sdk/export_storage/types/__init__.py index 58de62c07..1539a9784 100644 --- a/src/label_studio_sdk/export_storage/types/__init__.py +++ b/src/label_studio_sdk/export_storage/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .export_storage_list_types_response_item import ExportStorageListTypesResponseItem __all__ = ["ExportStorageListTypesResponseItem"] diff --git a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py index 684e9172a..50f3659ad 100644 --- a/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py +++ b/src/label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ExportStorageListTypesResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/files/__init__.py b/src/label_studio_sdk/files/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/files/__init__.py +++ b/src/label_studio_sdk/files/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/files/client.py b/src/label_studio_sdk/files/client.py index a832258c3..070e61c0e 100644 --- a/src/label_studio_sdk/files/client.py +++ b/src/label_studio_sdk/files/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.file_upload import FileUpload -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawFilesClient, RawFilesClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class FilesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawFilesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawFilesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawFilesClient + """ + return self._raw_client def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> FileUpload: """ @@ -46,24 +54,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -92,18 +84,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -119,7 +101,7 @@ def update( You will need to include the file data in the request body. 
For example: ```bash - curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F ‘file=@path/to/my_file.csv’ + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' ``` Parameters @@ -150,29 +132,8 @@ def update( id_=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id_)}", - method="PATCH", - json={ - "id": id, - "file": file, - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.update(id_, id=id, file=file, request_options=request_options) + return _response.data def list( self, @@ -218,28 +179,8 @@ def list( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="GET", - params={ - "all": all_, - "ids": ids, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[FileUpload], - parse_obj_as( - type_=typing.List[FileUpload], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, all_=all_, ids=ids, request_options=request_options) + return _response.data def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -271,18 +212,8 @@ def 
delete_many(self, id: int, *, request_options: typing.Optional[RequestOption id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete_many(id, request_options=request_options) + return _response.data def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -310,23 +241,24 @@ def download(self, filename: str, *, request_options: typing.Optional[RequestOpt filename="filename", ) """ - _response = self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.download(filename, request_options=request_options) + return _response.data class AsyncFilesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawFilesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawFilesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawFilesClient + """ + return self._raw_client async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> FileUpload: """ @@ -364,24 +296,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -418,18 +334,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -445,7 +351,7 @@ async def update( You will need to include the file data in the request body. 
For example: ```bash - curl -H 'Authorization: Token abc123' \ -X POST 'https://localhost:8080/api/import/file-upload/245' -F ‘file=@path/to/my_file.csv’ + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' ``` Parameters @@ -484,29 +390,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/import/file-upload/{jsonable_encoder(id_)}", - method="PATCH", - json={ - "id": id, - "file": file, - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - FileUpload, - parse_obj_as( - type_=FileUpload, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.update(id_, id=id, file=file, request_options=request_options) + return _response.data async def list( self, @@ -560,28 +445,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="GET", - params={ - "all": all_, - "ids": ids, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[FileUpload], - parse_obj_as( - type_=typing.List[FileUpload], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(id, all_=all_, ids=ids, request_options=request_options) + return _response.data async def delete_many(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> None: """ @@ -621,18 +486,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/file-uploads", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete_many(id, request_options=request_options) + return _response.data async def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -668,15 +523,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"data/upload/{jsonable_encoder(filename)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.download(filename, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/files/raw_client.py b/src/label_studio_sdk/files/raw_client.py new file mode 100644 index 000000000..9a39cdd6d --- /dev/null +++ b/src/label_studio_sdk/files/raw_client.py @@ -0,0 +1,523 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.file_upload import FileUpload + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawFilesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[FileUpload]: + """ + Retrieve details about a specific uploaded file. To get the file upload ID, use [Get files list](list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[FileUpload] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a specific uploaded file. 
To get the file upload ID, use [Get files list](list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id_: int, + *, + id: typing.Optional[int] = OMIT, + file: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[FileUpload]: + """ + + Update a specific uploaded file. To get the file upload ID, use [Get files list](list). + + You will need to include the file data in the request body. For example: + ```bash + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' + ``` + + Parameters + ---------- + id_ : int + A unique integer value identifying this file upload. + + id : typing.Optional[int] + + file : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[FileUpload] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id_)}", + method="PATCH", + json={ + "id": id, + "file": file, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, + id: int, + *, + all_: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[FileUpload]]: + """ + + Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + all_ : typing.Optional[bool] + Set to "true" if you want to retrieve all file uploads + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + Specify the list of file upload IDs to retrieve, e.g. ids=[1,2,3] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[FileUpload]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="GET", + params={ + "all": all_, + "ids": ids, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete_many(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def download(self, filename: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. + + Parameters + ---------- + filename : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawFilesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[FileUpload]: + """ + Retrieve details about a specific uploaded file. To get the file upload ID, use [Get files list](list). 
+ + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[FileUpload] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a specific uploaded file. To get the file upload ID, use [Get files list](list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id_: int, + *, + id: typing.Optional[int] = OMIT, + file: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[FileUpload]: + """ + + Update a specific uploaded file. To get the file upload ID, use [Get files list](list). + + You will need to include the file data in the request body. For example: + ```bash + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/import/file-upload/245' -F 'file=@path/to/my_file.csv' + ``` + + Parameters + ---------- + id_ : int + A unique integer value identifying this file upload. + + id : typing.Optional[int] + + file : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[FileUpload] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/import/file-upload/{jsonable_encoder(id_)}", + method="PATCH", + json={ + "id": id, + "file": file, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + FileUpload, + parse_obj_as( + type_=FileUpload, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, + id: int, + *, + all_: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[FileUpload]]: + """ + + Retrieve the list of uploaded files used to create labeling tasks for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + all_ : typing.Optional[bool] + Set to "true" if you want to retrieve all file uploads + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + Specify the list of file upload IDs to retrieve, e.g. ids=[1,2,3] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[FileUpload]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="GET", + params={ + "all": all_, + "ids": ids, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[FileUpload], + parse_obj_as( + type_=typing.List[FileUpload], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete_many( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete uploaded files for a specific project. These are files that have been uploaded directly to Label Studio. + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this file upload. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/file-uploads", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def download( + self, filename: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Download a specific uploaded file. If you aren't sure of the file name, try [Get files list](list) first. + + Parameters + ---------- + filename : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"data/upload/{jsonable_encoder(filename)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/__init__.py b/src/label_studio_sdk/import_storage/__init__.py index 51599b165..b2e5e5130 100644 --- a/src/label_studio_sdk/import_storage/__init__.py +++ b/src/label_studio_sdk/import_storage/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import ImportStorageListTypesResponseItem from . import azure, gcs, local, redis, s3, s3s from .azure import AzureCreateResponse, AzureUpdateResponse diff --git a/src/label_studio_sdk/import_storage/azure/__init__.py b/src/label_studio_sdk/import_storage/azure/__init__.py index 323fc5f3d..97dcea344 100644 --- a/src/label_studio_sdk/import_storage/azure/__init__.py +++ b/src/label_studio_sdk/import_storage/azure/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import AzureCreateResponse, AzureUpdateResponse __all__ = ["AzureCreateResponse", "AzureUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/azure/client.py b/src/label_studio_sdk/import_storage/azure/client.py index be43dd3da..2b70c67fe 100644 --- a/src/label_studio_sdk/import_storage/azure/client.py +++ b/src/label_studio_sdk/import_storage/azure/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.azure_blob_import_storage import AzureBlobImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawAzureClient, RawAzureClient from .types.azure_create_response import AzureCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.azure_update_response import AzureUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,13 +15,21 @@ class AzureClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawAzureClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobImportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.import_storage.azure.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobImportStorage], - parse_obj_as( - type_=typing.List[AzureBlobImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -156,41 +142,21 @@ def create( ) client.import_storage.azure.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - 
"content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -267,36 +233,22 @@ def validate( ) client.import_storage.azure.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/azure/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -329,24 +281,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -380,18 +316,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -473,41 +399,22 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, 
- "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -542,35 +449,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncAzureClient: def __init__(self, *, 
client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawAzureClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawAzureClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawAzureClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[AzureBlobImportStorage]: """ @@ -610,27 +509,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[AzureBlobImportStorage], - parse_obj_as( - type_=typing.List[AzureBlobImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -718,41 +598,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": 
"application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureCreateResponse, - parse_obj_as( - type_=AzureCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -837,36 +697,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/azure/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -907,24 +753,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -966,18 +796,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1067,41 +887,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - 
"presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "container": container, - "prefix": prefix, - "account_name": account_name, - "account_key": account_key, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + container=container, + prefix=prefix, + account_name=account_name, + account_key=account_key, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureUpdateResponse, - parse_obj_as( - type_=AzureUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> AzureBlobImportStorage: """ @@ -1144,21 +945,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/azure/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AzureBlobImportStorage, - parse_obj_as( - type_=AzureBlobImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/azure/raw_client.py 
b/src/label_studio_sdk/import_storage/azure/raw_client.py new file mode 100644 index 000000000..6ebcfaff5 --- /dev/null +++ b/src/label_studio_sdk/import_storage/azure/raw_client.py @@ -0,0 +1,981 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.azure_blob_import_storage import AzureBlobImportStorage +from .types.azure_create_response import AzureCreateResponse +from .types.azure_update_response import AzureUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawAzureClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[AzureBlobImportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. Use this API request to get a list of all Azure import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[AzureBlobImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureCreateResponse]: + """ + + Create a new source storage connection to Microsoft Azure Blob storage. + + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[AzureCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. 
If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobImportStorage]: + """ + + Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[AzureBlobImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AzureBlobImportStorage]: + """ + + Sync tasks 
from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AzureBlobImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawAzureClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[AzureBlobImportStorage]]: + """ + + You can connect your Microsoft Azure Blob storage container to Label Studio as a source storage or target storage. 
Use this API request to get a list of all Azure import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[AzureBlobImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[AzureBlobImportStorage], + parse_obj_as( + type_=typing.List[AzureBlobImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
AsyncHttpResponse[AzureCreateResponse]: + """ + + Create a new source storage connection to Microsoft Azure Blob storage. + + For information about the required fields and prerequisites, see [Microsoft Azure Blob storage](https://labelstud.io/guide/storage#Microsoft-Azure-Blob-storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AzureCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureCreateResponse, + parse_obj_as( + type_=AzureCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Azure import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
+ + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AzureBlobImportStorage]: + """ + + Get a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AzureBlobImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + container: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + account_name: typing.Optional[str] = OMIT, + account_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[AzureUpdateResponse]: + """ + + Update a specific Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this azure blob import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + container : typing.Optional[str] + Azure blob container + + prefix : typing.Optional[str] + Azure blob prefix name + + account_name : typing.Optional[str] + Azure Blob account name + + account_key : typing.Optional[str] + Azure Blob account key + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "container": container, + "prefix": prefix, + "account_name": account_name, + "account_key": account_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureUpdateResponse, + parse_obj_as( + type_=AzureUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
AsyncHttpResponse[AzureBlobImportStorage]: + """ + + Sync tasks from an Azure import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external containers only go one way. They either create tasks from objects in the container (source/import storage) or push annotations to the output container (export/target storage). Changing something on the Microsoft side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[AzureBlobImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AzureBlobImportStorage, + parse_obj_as( + type_=AzureBlobImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/azure/types/__init__.py b/src/label_studio_sdk/import_storage/azure/types/__init__.py index 0cb2cdbbc..e56fb71c1 100644 --- a/src/label_studio_sdk/import_storage/azure/types/__init__.py +++ b/src/label_studio_sdk/import_storage/azure/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .azure_create_response import AzureCreateResponse from .azure_update_response import AzureUpdateResponse diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py index b59cf0117..b6cd9028b 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py index afacbeb28..207aa6d05 100644 --- a/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py +++ b/src/label_studio_sdk/import_storage/azure/types/azure_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AzureUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/client.py b/src/label_studio_sdk/import_storage/client.py index c4a97e1ad..2fada220a 100644 --- a/src/label_studio_sdk/import_storage/client.py +++ b/src/label_studio_sdk/import_storage/client.py @@ -1,38 +1,44 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.client_wrapper import SyncClientWrapper -from .azure.client import AzureClient -from .gcs.client import GcsClient -from .local.client import LocalClient -from .redis.client import RedisClient -from .s3.client import S3Client -from .s3s.client import S3SClient import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from .types.import_storage_list_types_response_item import ( - ImportStorageListTypesResponseItem, -) -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper -from .azure.client import AsyncAzureClient -from .gcs.client import AsyncGcsClient -from .local.client import AsyncLocalClient -from .redis.client import AsyncRedisClient -from .s3.client import AsyncS3Client -from .s3s.client import AsyncS3SClient +from .azure.client import AsyncAzureClient, AzureClient +from .gcs.client import AsyncGcsClient, GcsClient +from .local.client import AsyncLocalClient, LocalClient +from .raw_client import AsyncRawImportStorageClient, RawImportStorageClient +from .redis.client import AsyncRedisClient, RedisClient +from .s3.client import AsyncS3Client, S3Client +from .s3s.client import AsyncS3SClient, S3SClient +from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem class ImportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AzureClient(client_wrapper=self._client_wrapper) - self.gcs = GcsClient(client_wrapper=self._client_wrapper) - self.local = LocalClient(client_wrapper=self._client_wrapper) - self.redis = RedisClient(client_wrapper=self._client_wrapper) - self.s3 = S3Client(client_wrapper=self._client_wrapper) - self.s3s = S3SClient(client_wrapper=self._client_wrapper) + self._raw_client = 
RawImportStorageClient(client_wrapper=client_wrapper) + self.azure = AzureClient(client_wrapper=client_wrapper) + + self.gcs = GcsClient(client_wrapper=client_wrapper) + + self.local = LocalClient(client_wrapper=client_wrapper) + + self.redis = RedisClient(client_wrapper=client_wrapper) + + self.s3 = S3Client(client_wrapper=client_wrapper) + + self.s3s = S3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawImportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawImportStorageClient + """ + return self._raw_client def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -59,35 +65,35 @@ def list_types( ) client.import_storage.list_types() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ImportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list_types(request_options=request_options) + return _response.data class AsyncImportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) - self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) - self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) - self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) - self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) - self.s3s = 
AsyncS3SClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawImportStorageClient(client_wrapper=client_wrapper) + self.azure = AsyncAzureClient(client_wrapper=client_wrapper) + + self.gcs = AsyncGcsClient(client_wrapper=client_wrapper) + + self.local = AsyncLocalClient(client_wrapper=client_wrapper) + + self.redis = AsyncRedisClient(client_wrapper=client_wrapper) + + self.s3 = AsyncS3Client(client_wrapper=client_wrapper) + + self.s3s = AsyncS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawImportStorageClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawImportStorageClient + """ + return self._raw_client async def list_types( self, *, request_options: typing.Optional[RequestOptions] = None @@ -122,21 +128,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/types", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ImportStorageListTypesResponseItem], - parse_obj_as( - type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_types(request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/gcs/__init__.py b/src/label_studio_sdk/import_storage/gcs/__init__.py index 2c4b3d376..7054c2af9 100644 --- a/src/label_studio_sdk/import_storage/gcs/__init__.py +++ b/src/label_studio_sdk/import_storage/gcs/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import GcsCreateResponse, GcsUpdateResponse __all__ = ["GcsCreateResponse", "GcsUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/gcs/client.py b/src/label_studio_sdk/import_storage/gcs/client.py index 9007140ab..aeca74bdc 100644 --- a/src/label_studio_sdk/import_storage/gcs/client.py +++ b/src/label_studio_sdk/import_storage/gcs/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.gcs_import_storage import GcsImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawGcsClient, RawGcsClient from .types.gcs_create_response import GcsCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.gcs_update_response import GcsUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class GcsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawGcsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawGcsClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsImportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.import_storage.gcs.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsImportStorage], - parse_obj_as( - type_=typing.List[GcsImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -156,41 +142,21 @@ def create( ) client.import_storage.gcs.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + 
google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -267,36 +233,22 @@ def validate( ) client.import_storage.gcs.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/gcs/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ 
-329,24 +281,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -380,18 +316,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -473,41 +399,22 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - 
"google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -542,35 +449,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncGcsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawGcsClient(client_wrapper=client_wrapper) + + @property + 
def with_raw_response(self) -> AsyncRawGcsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawGcsClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[GcsImportStorage]: """ @@ -610,27 +509,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[GcsImportStorage], - parse_obj_as( - type_=typing.List[GcsImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -718,41 +598,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + 
presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsCreateResponse, - parse_obj_as( - type_=GcsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -837,36 +697,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/gcs/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + 
return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -907,24 +753,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -966,18 +796,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1067,41 +887,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "title": title, - "description": description, - "project": project, 
- "bucket": bucket, - "prefix": prefix, - "google_application_credentials": google_application_credentials, - "google_project_id": google_project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + google_application_credentials=google_application_credentials, + google_project_id=google_project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsUpdateResponse, - parse_obj_as( - type_=GcsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcsImportStorage: """ @@ -1144,21 +945,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/gcs/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - GcsImportStorage, - parse_obj_as( - type_=GcsImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/gcs/raw_client.py 
b/src/label_studio_sdk/import_storage/gcs/raw_client.py new file mode 100644 index 000000000..2e153f0f0 --- /dev/null +++ b/src/label_studio_sdk/import_storage/gcs/raw_client.py @@ -0,0 +1,981 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.gcs_import_storage import GcsImportStorage +from .types.gcs_create_response import GcsCreateResponse +from .types.gcs_update_response import GcsUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawGcsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[GcsImportStorage]]: + """ + + You can connect your Google Cloud Storage bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[GcsImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsCreateResponse]: + """ + + Create a new source storage connection to a Google Cloud Storage bucket. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
+ + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[GcsCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
+ + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcs/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsImportStorage]: + """ + + Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[GcsImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def 
sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[GcsImportStorage]: + """ + + Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[GcsImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawGcsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[GcsImportStorage]]: + """ + + You can connect your Google Cloud Storage 
bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[GcsImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[GcsImportStorage], + parse_obj_as( + type_=typing.List[GcsImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsCreateResponse]: + """ + + Create a new source storage connection to a Google Cloud Storage bucket. + + For information about the required fields and prerequisites, see [Google Cloud Storage](https://labelstud.io/guide/storage#Google-Cloud-Storage) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GcsCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsCreateResponse, + parse_obj_as( + type_=GcsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific GCS import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. 
+ + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcs/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsImportStorage]: + """ + + Get a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[GcsImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[GcsUpdateResponse]: + """ + + Update a specific GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this gcs import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. 
If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for direct download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + GCS bucket name + + prefix : typing.Optional[str] + GCS bucket prefix + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + + google_project_id : typing.Optional[str] + Google project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsUpdateResponse, + parse_obj_as( + type_=GcsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), 
body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[GcsImportStorage]: + """ + + Sync tasks from a GCS import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[GcsImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcs/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + GcsImportStorage, + parse_obj_as( + type_=GcsImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/gcs/types/__init__.py b/src/label_studio_sdk/import_storage/gcs/types/__init__.py index 832c1ee1c..73d049459 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/__init__.py +++ b/src/label_studio_sdk/import_storage/gcs/types/__init__.py @@ -1,5 +1,7 @@ # 
This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .gcs_create_response import GcsCreateResponse from .gcs_update_response import GcsUpdateResponse diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py index 58c05a731..7950f54b0 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py index 54c7e415c..96e92949b 100644 --- a/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py +++ b/src/label_studio_sdk/import_storage/gcs/types/gcs_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class GcsUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/local/__init__.py b/src/label_studio_sdk/import_storage/local/__init__.py index 248109b66..44e4524e0 100644 --- a/src/label_studio_sdk/import_storage/local/__init__.py +++ b/src/label_studio_sdk/import_storage/local/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import LocalCreateResponse, LocalUpdateResponse __all__ = ["LocalCreateResponse", "LocalUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/local/client.py b/src/label_studio_sdk/import_storage/local/client.py index fb51c56d2..08bd1b9ba 100644 --- a/src/label_studio_sdk/import_storage/local/client.py +++ b/src/label_studio_sdk/import_storage/local/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.local_files_import_storage import LocalFilesImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawLocalClient, RawLocalClient from .types.local_create_response import LocalCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.local_update_response import LocalUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class LocalClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawLocalClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesImportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.import_storage.local.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesImportStorage], - parse_obj_as( - type_=typing.List[LocalFilesImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -134,36 +120,16 @@ def create( ) client.import_storage.local.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = 
_response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -220,31 +186,17 @@ def validate( ) client.import_storage.local.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -277,24 +229,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = 
self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -328,18 +264,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -401,36 +327,17 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: 
""" @@ -465,35 +372,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncLocalClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawLocalClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawLocalClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawLocalClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[LocalFilesImportStorage]: """ @@ -533,27 +432,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[LocalFilesImportStorage], - parse_obj_as( - type_=typing.List[LocalFilesImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -619,36 +499,16 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/", - method="POST", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalCreateResponse, - parse_obj_as( - type_=LocalCreateResponse, # type: ignore - object_=_response.json(), - 
), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -713,31 +573,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/localfiles/validate", - method="POST", - json={ - "id": id, - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> LocalFilesImportStorage: """ @@ -778,24 +624,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = 
await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -837,18 +667,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -918,36 +738,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "project": project, - "path": path, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + description=description, + project=project, + path=path, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalUpdateResponse, - parse_obj_as( - type_=LocalUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync( self, id: int, *, request_options: typing.Optional[RequestOptions] = 
None @@ -992,21 +793,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/localfiles/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - LocalFilesImportStorage, - parse_obj_as( - type_=LocalFilesImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/local/raw_client.py b/src/label_studio_sdk/import_storage/local/raw_client.py new file mode 100644 index 000000000..e631df782 --- /dev/null +++ b/src/label_studio_sdk/import_storage/local/raw_client.py @@ -0,0 +1,827 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.local_files_import_storage import LocalFilesImportStorage +from .types.local_create_response import LocalCreateResponse +from .types.local_update_response import LocalUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawLocalClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[LocalFilesImportStorage]]: + """ + + If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[LocalFilesImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalCreateResponse]: + """ + + Create a new source storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
+ + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesImportStorage]: + """ + + Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[LocalFilesImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[LocalUpdateResponse]: + """ + + Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[LocalUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[LocalFilesImportStorage]: + """ + + Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[LocalFilesImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawLocalClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[LocalFilesImportStorage]]: + """ + + If you have local files that you want to add to Label Studio from a specific directory, you can set up a specific local directory on the machine where LS is running as source or target storage. Use this API request to get a list of all local file import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[LocalFilesImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[LocalFilesImportStorage], + parse_obj_as( + type_=typing.List[LocalFilesImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalCreateResponse]: + """ + + Create a new source storage connection to a local file directory. + + For information about the required fields and prerequisites, see [Local storage](https://labelstud.io/guide/storage#Local-storage) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). 
+ + Parameters + ---------- + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/", + method="POST", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalCreateResponse, + parse_obj_as( + type_=LocalCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + 
request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific local file import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/localfiles/validate", + method="POST", + json={ + "id": id, + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesImportStorage]: + """ + + Get a specific local file import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[LocalFilesImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[LocalUpdateResponse]: + """ + + Update a specific local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this local files import storage. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Path to local directory + + regex_filter : typing.Optional[str] + Regex for filtering objects + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "project": project, + "path": path, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalUpdateResponse, + parse_obj_as( + type_=LocalUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[LocalFilesImportStorage]: + """ + + Sync tasks from a local import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external sources only go one way. They either create tasks from objects in the source directory (source/import storage) or push annotations to the output directory (export/target storage). Changing something on the local file side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. 
+ + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[LocalFilesImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/localfiles/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + LocalFilesImportStorage, + parse_obj_as( + type_=LocalFilesImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/local/types/__init__.py b/src/label_studio_sdk/import_storage/local/types/__init__.py index 9a12e8745..5f88d9245 100644 --- a/src/label_studio_sdk/import_storage/local/types/__init__.py +++ b/src/label_studio_sdk/import_storage/local/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .local_create_response import LocalCreateResponse from .local_update_response import LocalUpdateResponse diff --git a/src/label_studio_sdk/import_storage/local/types/local_create_response.py b/src/label_studio_sdk/import_storage/local/types/local_create_response.py index 95051747a..4f45cad89 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_create_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/local/types/local_update_response.py b/src/label_studio_sdk/import_storage/local/types/local_update_response.py index e5dd8df6c..885189c74 100644 --- a/src/label_studio_sdk/import_storage/local/types/local_update_response.py +++ b/src/label_studio_sdk/import_storage/local/types/local_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class LocalUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/raw_client.py b/src/label_studio_sdk/import_storage/raw_client.py new file mode 100644 index 000000000..71f3ffbe2 --- /dev/null +++ b/src/label_studio_sdk/import_storage/raw_client.py @@ -0,0 +1,93 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from .types.import_storage_list_types_response_item import ImportStorageListTypesResponseItem + + +class RawImportStorageClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ImportStorageListTypesResponseItem]]: + """ + Retrieve a list of the import storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[ImportStorageListTypesResponseItem]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawImportStorageClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list_types( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> 
AsyncHttpResponse[typing.List[ImportStorageListTypesResponseItem]]: + """ + Retrieve a list of the import storages types. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[ImportStorageListTypesResponseItem]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/types", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ImportStorageListTypesResponseItem], + parse_obj_as( + type_=typing.List[ImportStorageListTypesResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/redis/__init__.py b/src/label_studio_sdk/import_storage/redis/__init__.py index 7f87f18fe..e52cb2ace 100644 --- a/src/label_studio_sdk/import_storage/redis/__init__.py +++ b/src/label_studio_sdk/import_storage/redis/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import RedisCreateResponse, RedisUpdateResponse __all__ = ["RedisCreateResponse", "RedisUpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/redis/client.py b/src/label_studio_sdk/import_storage/redis/client.py index 543b2740c..fa8ac2684 100644 --- a/src/label_studio_sdk/import_storage/redis/client.py +++ b/src/label_studio_sdk/import_storage/redis/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.redis_import_storage import RedisImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawRedisClient, RawRedisClient from .types.redis_create_response import RedisCreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.redis_update_response import RedisUpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class RedisClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawRedisClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisImportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.import_storage.redis.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisImportStorage], - parse_obj_as( - type_=typing.List[RedisImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -146,39 +132,19 @@ def create( ) client.import_storage.redis.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - parse_obj_as( - type_=RedisCreateResponse, # 
type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -247,34 +213,20 @@ def validate( ) client.import_storage.redis.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/redis/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -307,24 +259,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -358,18 +294,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -443,39 +369,20 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -510,35 +417,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncRedisClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawRedisClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawRedisClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawRedisClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[RedisImportStorage]: """ @@ -578,27 +477,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[RedisImportStorage], - parse_obj_as( - type_=typing.List[RedisImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -676,39 +556,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisCreateResponse, - 
parse_obj_as( - type_=RedisCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -785,34 +645,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/redis/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -853,24 +699,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -912,18 +742,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1005,39 +825,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "title": title, - "description": description, - "project": project, - "path": path, - "host": host, - "port": port, - "password": password, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + title=title, + description=description, + project=project, + path=path, + host=host, + port=port, + password=password, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisUpdateResponse, - parse_obj_as( - type_=RedisUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except 
JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> RedisImportStorage: """ @@ -1080,21 +881,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/redis/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RedisImportStorage, - parse_obj_as( - type_=RedisImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/redis/raw_client.py b/src/label_studio_sdk/import_storage/redis/raw_client.py new file mode 100644 index 000000000..2411795ad --- /dev/null +++ b/src/label_studio_sdk/import_storage/redis/raw_client.py @@ -0,0 +1,917 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.redis_import_storage import RedisImportStorage +from .types.redis_create_response import RedisCreateResponse +from .types.redis_update_response import RedisUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawRedisClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[RedisImportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[RedisImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisCreateResponse]: + """ + + Create a new source storage connection to a Redis database. + + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] 
= OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/redis/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisImportStorage]: + """ + + Get a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RedisImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RedisImportStorage]: + """ + + Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external databases only go one way. 
They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[RedisImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawRedisClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[RedisImportStorage]]: + """ + + You can connect your Redis database to Label Studio as a source storage or target storage. Use this API request to get a list of all Redis import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). 
+ + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[RedisImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[RedisImportStorage], + parse_obj_as( + type_=typing.List[RedisImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisCreateResponse]: + """ + + Create a new source storage connection to a Redis database. + + For information about the required fields and prerequisites, see [Redis database](https://labelstud.io/guide/storage#Redis-database) in the Label Studio documentation. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[RedisCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisCreateResponse, + parse_obj_as( + type_=RedisCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific Redis import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. 
You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/redis/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisImportStorage]: + """ + + Get a specific Redis import storage connection. You will need to provide the import storage ID. 
You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. 
+ + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + path: typing.Optional[str] = OMIT, + host: typing.Optional[str] = OMIT, + port: typing.Optional[str] = OMIT, + password: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RedisUpdateResponse]: + """ + + Update a specific Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this redis import storage. 
+ + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + path : typing.Optional[str] + Storage prefix (optional) + + host : typing.Optional[str] + Server Host IP (optional) + + port : typing.Optional[str] + Server Port (optional) + + password : typing.Optional[str] + Server Password (optional) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "title": title, + "description": description, + "project": project, + "path": path, + "host": host, + "port": port, + "password": password, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisUpdateResponse, + parse_obj_as( + type_=RedisUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: 
int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RedisImportStorage]: + """ + + Sync tasks from a Redis import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external databases only go one way. They either create tasks from objects in the database (source/import storage) or push annotations to the output database (export/target storage). Changing something on the database side doesn’t guarantee consistency in results. + + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RedisImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/redis/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RedisImportStorage, + parse_obj_as( + type_=RedisImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/redis/types/__init__.py b/src/label_studio_sdk/import_storage/redis/types/__init__.py index b3557bc08..aea7ed291 100644 --- a/src/label_studio_sdk/import_storage/redis/types/__init__.py +++ b/src/label_studio_sdk/import_storage/redis/types/__init__.py @@ -1,5 +1,7 @@ # This file was 
auto-generated by Fern from our API Definition. +# isort: skip_file + from .redis_create_response import RedisCreateResponse from .redis_update_response import RedisUpdateResponse diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py index fa8fba715..c2dda07b6 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py index 247ff9057..6d9310ee1 100644 --- a/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py +++ b/src/label_studio_sdk/import_storage/redis/types/redis_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class RedisUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3/__init__.py b/src/label_studio_sdk/import_storage/s3/__init__.py index c749fe227..e6421caaf 100644 --- a/src/label_studio_sdk/import_storage/s3/__init__.py +++ b/src/label_studio_sdk/import_storage/s3/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import S3CreateResponse, S3UpdateResponse __all__ = ["S3CreateResponse", "S3UpdateResponse"] diff --git a/src/label_studio_sdk/import_storage/s3/client.py b/src/label_studio_sdk/import_storage/s3/client.py index 3884545d8..6219d680c 100644 --- a/src/label_studio_sdk/import_storage/s3/client.py +++ b/src/label_studio_sdk/import_storage/s3/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3import_storage import S3ImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawS3Client, RawS3Client from .types.s3create_response import S3CreateResponse -from ...core.jsonable_encoder import jsonable_encoder from .types.s3update_response import S3UpdateResponse -from ...core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,13 +15,21 @@ class S3Client: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawS3Client + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ImportStorage]: """ @@ -56,27 +61,8 @@ def list( ) client.import_storage.s3.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ImportStorage], - parse_obj_as( - type_=typing.List[S3ImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -176,46 +162,26 @@ def create( ) client.import_storage.s3.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + 
presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -312,41 +278,27 @@ def validate( ) client.import_storage.s3.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + 
aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -379,24 +331,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -430,18 +366,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = 
self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -543,46 +469,27 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -617,35 +524,27 @@ def sync(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncS3Client: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3Client(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3Client: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawS3Client + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3ImportStorage]: """ @@ -685,27 +584,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3ImportStorage], - parse_obj_as( - type_=typing.List[S3ImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -813,46 +693,26 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, 
+ presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3CreateResponse, - parse_obj_as( - type_=S3CreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -957,41 +817,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3/validate", - method="POST", - json={ - "id": id, - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + id=id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + 
aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -1032,24 +878,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -1091,18 +921,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await 
self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1212,46 +1032,27 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "aws_session_token": aws_session_token, - "aws_sse_kms_key_id": aws_sse_kms_key_id, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_sse_kms_key_id=aws_sse_kms_key_id, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3UpdateResponse, - parse_obj_as( - type_=S3UpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3ImportStorage: """ @@ -1294,21 +1095,5 @@ async def main() 
-> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3ImportStorage, - parse_obj_as( - type_=S3ImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/s3/raw_client.py b/src/label_studio_sdk/import_storage/s3/raw_client.py new file mode 100644 index 000000000..9604ce40d --- /dev/null +++ b/src/label_studio_sdk/import_storage/s3/raw_client.py @@ -0,0 +1,1129 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.s3import_storage import S3ImportStorage +from .types.s3create_response import S3CreateResponse +from .types.s3update_response import S3UpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawS3Client: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[S3ImportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[S3ImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = 
OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3CreateResponse]: + """ + + Create a new source storage connection to a S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3CreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + 
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise 
ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[S3ImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3ImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. 
Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3UpdateResponse]: + """ + + Update a specific 
S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3UpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3ImportStorage]: + """ + + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3ImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3Client: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3ImportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all Google import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). 
+ + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[S3ImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3ImportStorage], + parse_obj_as( + type_=typing.List[S3ImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3CreateResponse]: + """ + + Create a new source storage connection to a S3 bucket. 
+ + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3CreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3CreateResponse, + parse_obj_as( + type_=S3CreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + id: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: 
typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + id : typing.Optional[int] + Storage ID. If set, storage with specified ID will be updated + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3/validate", + method="POST", + json={ + "id": id, + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3ImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + aws_access_key_id: typing.Optional[str] = OMIT, + aws_secret_access_key: typing.Optional[str] = OMIT, + aws_session_token: typing.Optional[str] = OMIT, + aws_sse_kms_key_id: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3UpdateResponse]: + """ + + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + A unique integer value identifying this s3 import storage. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. 
You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + aws_access_key_id : typing.Optional[str] + AWS_ACCESS_KEY_ID + + aws_secret_access_key : typing.Optional[str] + AWS_SECRET_ACCESS_KEY + + aws_session_token : typing.Optional[str] + AWS_SESSION_TOKEN + + aws_sse_kms_key_id : typing.Optional[str] + AWS SSE KMS Key ID + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3UpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "aws_session_token": aws_session_token, + "aws_sse_kms_key_id": aws_sse_kms_key_id, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3UpdateResponse, + parse_obj_as( + type_=S3UpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3ImportStorage]: + """ + + Sync tasks from an S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Sync operations with external buckets only go one way. They either create tasks from objects in the bucket (source/import storage) or push annotations to the output bucket (export/target storage). Changing something on the bucket side doesn’t guarantee consistency in results. 
+ + Before proceeding, you should review [How sync operations work - Source storage](https://labelstud.io/guide/storage#Source-storage) to ensure that your data remains secure and private. + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3ImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3ImportStorage, + parse_obj_as( + type_=S3ImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/s3/types/__init__.py b/src/label_studio_sdk/import_storage/s3/types/__init__.py index 3cc20ce7b..bb333983a 100644 --- a/src/label_studio_sdk/import_storage/s3/types/__init__.py +++ b/src/label_studio_sdk/import_storage/s3/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .s3create_response import S3CreateResponse from .s3update_response import S3UpdateResponse diff --git a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py index 86b6e5fd3..ac8bed298 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3create_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3create_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3CreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py index 3ed56596d..95d6fe001 100644 --- a/src/label_studio_sdk/import_storage/s3/types/s3update_response.py +++ b/src/label_studio_sdk/import_storage/s3/types/s3update_response.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ....core.serialization import FieldMetadata -from ....core.pydantic_utilities import IS_PYDANTIC_V2 class S3UpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/import_storage/s3s/__init__.py b/src/label_studio_sdk/import_storage/s3s/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/import_storage/s3s/__init__.py +++ b/src/label_studio_sdk/import_storage/s3s/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/import_storage/s3s/client.py b/src/label_studio_sdk/import_storage/s3s/client.py index a5163a1a5..4f39746fe 100644 --- a/src/label_studio_sdk/import_storage/s3s/client.py +++ b/src/label_studio_sdk/import_storage/s3s/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.s3s_import_storage import S3SImportStorage -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...core.jsonable_encoder import jsonable_encoder -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawS3SClient, RawS3SClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,13 +13,21 @@ class S3SClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawS3SClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SImportStorage]: """ @@ -54,27 +59,8 @@ def list( ) client.import_storage.s3s.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SImportStorage], - parse_obj_as( - type_=typing.List[S3SImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response 
= self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -166,44 +152,24 @@ def create( ) client.import_storage.s3s.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -234,24 +200,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - 
return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -285,18 +235,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -390,44 +330,25 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + 
title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate( self, @@ -512,38 +433,24 @@ def validate( ) client.import_storage.s3s.validate() """ - _response = self._client_wrapper.httpx_client.request( - "api/storages/s3s/validate", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.validate( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + 
return _response.data def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -574,35 +481,27 @@ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.sync(id, request_options=request_options) + return _response.data class AsyncS3SClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawS3SClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawS3SClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawS3SClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[S3SImportStorage]: """ @@ -642,27 +541,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[S3SImportStorage], - parse_obj_as( - type_=typing.List[S3SImportStorage], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -762,44 +642,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, 
+ project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -838,24 +698,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -897,18 +741,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - 
raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1010,44 +844,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}", - method="PATCH", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate( self, @@ -1140,38 +955,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/storages/s3s/validate", - method="POST", - json={ - "regex_filter": regex_filter, - "use_blob_urls": use_blob_urls, - "presign": 
presign, - "presign_ttl": presign_ttl, - "recursive_scan": recursive_scan, - "title": title, - "description": description, - "project": project, - "bucket": bucket, - "prefix": prefix, - "external_id": external_id, - "role_arn": role_arn, - "region_name": region_name, - "s3_endpoint": s3endpoint, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.validate( + regex_filter=regex_filter, + use_blob_urls=use_blob_urls, + presign=presign, + presign_ttl=presign_ttl, + recursive_scan=recursive_scan, + title=title, + description=description, + project=project, + bucket=bucket, + prefix=prefix, + external_id=external_id, + role_arn=role_arn, + region_name=region_name, + s3endpoint=s3endpoint, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> S3SImportStorage: """ @@ -1210,21 +1011,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/storages/s3s/{jsonable_encoder(id)}/sync", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - S3SImportStorage, - parse_obj_as( - type_=S3SImportStorage, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.sync(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/import_storage/s3s/raw_client.py 
b/src/label_studio_sdk/import_storage/s3s/raw_client.py new file mode 100644 index 000000000..52f3367ae --- /dev/null +++ b/src/label_studio_sdk/import_storage/s3s/raw_client.py @@ -0,0 +1,1047 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.s3s_import_storage import S3SImportStorage + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawS3SClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[S3SImportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[S3SImportStorage]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SImportStorage]: + """ + + Create a new source storage connection to a S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. + + After you add the storage, you should validate the connection before attempting to sync your data. 
Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3SImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[S3SImportStorage]: + """ + + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + Import storage ID + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[S3SImportStorage]: + """ + + Sync tasks from an S3 import storage connection. 
You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[S3SImportStorage] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawS3SClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[S3SImportStorage]]: + """ + + You can connect your S3 bucket to Label Studio as a source storage or target storage. Use this API request to get a list of all S3 import (source) storage connections for a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[S3SImportStorage]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[S3SImportStorage], + parse_obj_as( + type_=typing.List[S3SImportStorage], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Create a new source storage connection to a S3 bucket. + + For information about the required fields and prerequisites, see [Amazon S3](https://labelstud.io/guide/storage#Amazon-S3) in the Label Studio documentation. + + Ensure you configure CORS before adding cloud storage. This ensures you will be able to see the content of the data rather than just a link. 
+ + After you add the storage, you should validate the connection before attempting to sync your data. Your data will not be imported until you [sync your connection](sync). + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Get a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + Deleting a source storage connection does not affect tasks with synced data in Label Studio. The sync process is designed to import new or updated tasks from the connected storage into the project, but it does not track deletions of files from the storage. Therefore, if you remove the external storage connection, the tasks that were created from that storage will remain in the project. + + If you want to remove the tasks that were synced from the external storage, you will need to delete them manually from within the Label Studio UI or use the [Delete tasks](../../tasks/delete-all-tasks) API. + + Parameters + ---------- + id : int + Import storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Update a specific S3 import storage connection. You will need to provide the import storage ID. You can find this using [List import storages](list). + + For more information about working with external storage, see [Sync data from external storage](https://labelstud.io/guide/storage). + + Parameters + ---------- + id : int + Import storage ID + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. 
For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}", + method="PATCH", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate( + self, + *, + regex_filter: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + recursive_scan: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + bucket: typing.Optional[str] = OMIT, + prefix: typing.Optional[str] = OMIT, + external_id: typing.Optional[str] = OMIT, + role_arn: typing.Optional[str] = OMIT, + region_name: typing.Optional[str] = OMIT, + s3endpoint: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Validate a specific S3 import storage connection. This is useful to ensure that the storage configuration settings are correct and operational before attempting to import data. + + Parameters + ---------- + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. 
+ + presign : typing.Optional[bool] + Presign URLs for download + + presign_ttl : typing.Optional[int] + Presign TTL in minutes + + recursive_scan : typing.Optional[bool] + Scan recursively + + title : typing.Optional[str] + Storage title + + description : typing.Optional[str] + Storage description + + project : typing.Optional[int] + Project ID + + bucket : typing.Optional[str] + S3 bucket name + + prefix : typing.Optional[str] + S3 bucket prefix + + external_id : typing.Optional[str] + AWS External ID + + role_arn : typing.Optional[str] + AWS Role ARN + + region_name : typing.Optional[str] + AWS Region + + s3endpoint : typing.Optional[str] + S3 Endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/s3s/validate", + method="POST", + json={ + "regex_filter": regex_filter, + "use_blob_urls": use_blob_urls, + "presign": presign, + "presign_ttl": presign_ttl, + "recursive_scan": recursive_scan, + "title": title, + "description": description, + "project": project, + "bucket": bucket, + "prefix": prefix, + "external_id": external_id, + "role_arn": role_arn, + "region_name": region_name, + "s3_endpoint": s3endpoint, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[S3SImportStorage]: + """ + + Sync tasks from an S3 import storage connection. 
You will need to provide the import storage ID. You can find this using [List import storages](list). + + Parameters + ---------- + id : int + Storage ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[S3SImportStorage] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/s3s/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + S3SImportStorage, + parse_obj_as( + type_=S3SImportStorage, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/import_storage/types/__init__.py b/src/label_studio_sdk/import_storage/types/__init__.py index f995a3c6d..f82663649 100644 --- a/src/label_studio_sdk/import_storage/types/__init__.py +++ b/src/label_studio_sdk/import_storage/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .import_storage_list_types_response_item import ImportStorageListTypesResponseItem __all__ = ["ImportStorageListTypesResponseItem"] diff --git a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py index 21112358a..3247ea665 100644 --- a/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py +++ b/src/label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ImportStorageListTypesResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/jwt_settings/__init__.py b/src/label_studio_sdk/jwt_settings/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/jwt_settings/__init__.py +++ b/src/label_studio_sdk/jwt_settings/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/jwt_settings/client.py b/src/label_studio_sdk/jwt_settings/client.py index 338513a6c..653bddc46 100644 --- a/src/label_studio_sdk/jwt_settings/client.py +++ b/src/label_studio_sdk/jwt_settings/client.py @@ -1,13 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.jwt_settings_response import JwtSettingsResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawJwtSettingsClient, RawJwtSettingsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -15,7 +13,18 @@ class JwtSettingsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawJwtSettingsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawJwtSettingsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawJwtSettingsClient + """ + return self._raw_client def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> JwtSettingsResponse: """ @@ -40,24 +49,8 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Jwt ) client.jwt_settings.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(request_options=request_options) + return _response.data def create( self, @@ -102,35 +95,29 @@ def create( api_token_ttl_days=1, ) """ - _response = self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="POST", - json={ - "api_tokens_enabled": api_tokens_enabled, - "legacy_api_tokens_enabled": legacy_api_tokens_enabled, - "api_token_ttl_days": api_token_ttl_days, - }, + _response = self._raw_client.create( + api_tokens_enabled=api_tokens_enabled, + legacy_api_tokens_enabled=legacy_api_tokens_enabled, + api_token_ttl_days=api_token_ttl_days, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncJwtSettingsClient: def __init__(self, *, client_wrapper: 
AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawJwtSettingsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawJwtSettingsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawJwtSettingsClient + """ + return self._raw_client async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> JwtSettingsResponse: """ @@ -163,24 +150,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(request_options=request_options) + return _response.data async def create( self, @@ -233,27 +204,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/jwt/settings", - method="POST", - json={ - "api_tokens_enabled": api_tokens_enabled, - "legacy_api_tokens_enabled": legacy_api_tokens_enabled, - "api_token_ttl_days": api_token_ttl_days, - }, + _response = await self._raw_client.create( + api_tokens_enabled=api_tokens_enabled, + legacy_api_tokens_enabled=legacy_api_tokens_enabled, + api_token_ttl_days=api_token_ttl_days, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - JwtSettingsResponse, - parse_obj_as( - type_=JwtSettingsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = 
_response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/jwt_settings/raw_client.py b/src/label_studio_sdk/jwt_settings/raw_client.py new file mode 100644 index 000000000..33cf55a1b --- /dev/null +++ b/src/label_studio_sdk/jwt_settings/raw_client.py @@ -0,0 +1,212 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.jwt_settings_response import JwtSettingsResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawJwtSettingsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[JwtSettingsResponse]: + """ + Retrieve JWT settings for the currently-active organization. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[JwtSettingsResponse] + JWT settings retrieved successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + api_tokens_enabled: bool, + legacy_api_tokens_enabled: bool, + api_token_ttl_days: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[JwtSettingsResponse]: + """ + Update JWT settings for the currently active organization. + + Parameters + ---------- + api_tokens_enabled : bool + Whether JWT API tokens are enabled + + legacy_api_tokens_enabled : bool + Whether legacy API tokens are enabled + + api_token_ttl_days : int + Number of days before API tokens expire + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[JwtSettingsResponse] + JWT settings updated successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="POST", + json={ + "api_tokens_enabled": api_tokens_enabled, + "legacy_api_tokens_enabled": legacy_api_tokens_enabled, + "api_token_ttl_days": api_token_ttl_days, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawJwtSettingsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[JwtSettingsResponse]: + """ + Retrieve JWT settings for the currently-active organization. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[JwtSettingsResponse] + JWT settings retrieved successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + api_tokens_enabled: bool, + legacy_api_tokens_enabled: bool, + api_token_ttl_days: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[JwtSettingsResponse]: + """ + Update JWT settings for the currently active organization. + + Parameters + ---------- + api_tokens_enabled : bool + Whether JWT API tokens are enabled + + legacy_api_tokens_enabled : bool + Whether legacy API tokens are enabled + + api_token_ttl_days : int + Number of days before API tokens expire + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[JwtSettingsResponse] + JWT settings updated successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/jwt/settings", + method="POST", + json={ + "api_tokens_enabled": api_tokens_enabled, + "legacy_api_tokens_enabled": legacy_api_tokens_enabled, + "api_token_ttl_days": api_token_ttl_days, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + JwtSettingsResponse, + parse_obj_as( + type_=JwtSettingsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/ml/__init__.py b/src/label_studio_sdk/ml/__init__.py index e0f97600c..613f98967 100644 --- a/src/label_studio_sdk/ml/__init__.py +++ b/src/label_studio_sdk/ml/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( MlCreateRequestAuthMethod, MlCreateResponse, diff --git a/src/label_studio_sdk/ml/client.py b/src/label_studio_sdk/ml/client.py index 36be4dea6..2284a86d3 100644 --- a/src/label_studio_sdk/ml/client.py +++ b/src/label_studio_sdk/ml/client.py @@ -1,19 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.ml_backend import MlBackend -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from .raw_client import AsyncRawMlClient, RawMlClient from .types.ml_create_request_auth_method import MlCreateRequestAuthMethod from .types.ml_create_response import MlCreateResponse -from ..core.jsonable_encoder import jsonable_encoder from .types.ml_update_request_auth_method import MlUpdateRequestAuthMethod from .types.ml_update_response import MlUpdateResponse -from ..errors.internal_server_error import InternalServerError -from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -21,13 +17,21 @@ class MlClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawMlClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawMlClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawMlClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[MlBackend]: """ @@ -58,27 +62,8 @@ def list( ) client.ml.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/ml/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MlBackend], - parse_obj_as( - type_=typing.List[MlBackend], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -152,40 +137,20 @@ def create( ) client.ml.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/ml/", - method="POST", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlCreateResponse, - 
parse_obj_as( - type_=MlCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ @@ -218,24 +183,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlBackend, - parse_obj_as( - type_=MlBackend, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -267,18 +216,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -356,40 +295,21 @@ def update( id=1, ) """ - _response = 
self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="PATCH", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlUpdateResponse, - parse_obj_as( - type_=MlUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def predict_interactive( self, @@ -439,26 +359,10 @@ def predict_interactive( task=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/interactive-annotating", - method="POST", - json={ - "task": task, - "context": context, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.predict_interactive( + id, task=task, context=context, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def 
train( self, @@ -501,35 +405,8 @@ def train( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/train", - method="POST", - json={ - "use_ground_truth": use_ground_truth, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 500: - raise InternalServerError( - typing.cast( - str, - parse_obj_as( - type_=str, # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.train(id, use_ground_truth=use_ground_truth, request_options=request_options) + return _response.data def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -558,29 +435,27 @@ def list_model_versions(self, id: str, *, request_options: typing.Optional[Reque id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list_model_versions(id, request_options=request_options) + return _response.data class AsyncMlClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawMlClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawMlClient: + """ + Retrieves a raw implementation of this client that returns raw 
responses. + + Returns + ------- + AsyncRawMlClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[MlBackend]: """ @@ -619,27 +494,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/ml/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MlBackend], - parse_obj_as( - type_=typing.List[MlBackend], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -721,40 +577,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/ml/", - method="POST", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - 
if 200 <= _response.status_code < 300: - return typing.cast( - MlCreateResponse, - parse_obj_as( - type_=MlCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> MlBackend: """ @@ -795,24 +631,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlBackend, - parse_obj_as( - type_=MlBackend, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -852,18 +672,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -949,40 +759,21 @@ async def main() -> None: 
asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}", - method="PATCH", - json={ - "url": url, - "project": project, - "is_interactive": is_interactive, - "title": title, - "description": description, - "auth_method": auth_method, - "basic_auth_user": basic_auth_user, - "basic_auth_pass": basic_auth_pass, - "extra_params": extra_params, - "timeout": timeout, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + url=url, + project=project, + is_interactive=is_interactive, + title=title, + description=description, + auth_method=auth_method, + basic_auth_user=basic_auth_user, + basic_auth_pass=basic_auth_pass, + extra_params=extra_params, + timeout=timeout, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MlUpdateResponse, - parse_obj_as( - type_=MlUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def predict_interactive( self, @@ -1040,26 +831,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/interactive-annotating", - method="POST", - json={ - "task": task, - "context": context, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.predict_interactive( + id, task=task, context=context, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def train( self, @@ -1110,35 +885,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/train", - method="POST", - json={ - "use_ground_truth": use_ground_truth, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 500: - raise InternalServerError( - typing.cast( - str, - parse_obj_as( - type_=str, # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.train(id, use_ground_truth=use_ground_truth, request_options=request_options) + return _response.data async def list_model_versions(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -1175,15 +923,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/ml/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_model_versions(id, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/ml/raw_client.py b/src/label_studio_sdk/ml/raw_client.py new file mode 100644 index 000000000..30ebeee47 --- /dev/null +++ b/src/label_studio_sdk/ml/raw_client.py @@ -0,0 +1,968 @@ +# 
This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.internal_server_error import InternalServerError +from ..types.ml_backend import MlBackend +from .types.ml_create_request_auth_method import MlCreateRequestAuthMethod +from .types.ml_create_response import MlCreateResponse +from .types.ml_update_request_auth_method import MlUpdateRequestAuthMethod +from .types.ml_update_response import MlUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawMlClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[MlBackend]]: + """ + + List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). + + + You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[MlBackend]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MlCreateResponse]: + """ + + Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). + + If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). + + If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. 
See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). + + Parameters + ---------- + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlCreateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MlCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/ml/", + method="POST", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] 
= None) -> HttpResponse[MlBackend]: + """ + + Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MlBackend] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[MlUpdateResponse]: + """ + + Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. 
+ + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlUpdateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[MlUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="PATCH", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def predict_interactive( + self, + id: int, + *, + task: int, + context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) 
-> HttpResponse[None]: + """ + + Enable interactive pre-annotations for a specific task. + + ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). + + Before you can use interactive annotations, it must be enabled for you ML backend connection (`"is_interactive": true`). + + You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + task : int + ID of task to annotate + + context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Context for ML model + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/interactive-annotating", + method="POST", + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def train( + self, + id: int, + *, + use_ground_truth: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. + + For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). + + You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + use_ground_truth : typing.Optional[bool] + Whether to include ground truth annotations in training + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/train", + method="POST", + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list_model_versions( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawMlClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[MlBackend]]: + """ + + List all configured Machine Learning (ML) backends for a specific project by ID. For more information about ML backends, see [Machine learning integration](https://labelstud.io/guide/ml). + + + You will need to provide the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[MlBackend]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/ml/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MlBackend], + parse_obj_as( + type_=typing.List[MlBackend], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlCreateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MlCreateResponse]: + """ + + Add an ML backend to a project. For more information about what you need to configure when adding an ML backend, see [Connect the model to Label studio](https://labelstud.io/guide/ml#Connect-the-model-to-Label-Studio). + + If you are using Docker Compose, you may need to adjust your ML backend URL. See [localhost and Docker containers](https://labelstud.io/guide/ml#localhost-and-Docker-containers). 
+ + If you are using files that are located in the cloud, local storage, or uploaded to Label Studio, you must configure your environment variables to allow the ML backend to interact with those files. See [Allow the ML backend to access Label Studio](https://labelstud.io/guide/ml#Allow-the-ML-backend-to-access-Label-Studio-data). + + Parameters + ---------- + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlCreateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MlCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/ml/", + method="POST", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlCreateResponse, + parse_obj_as( + type_=MlCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[MlBackend]: + """ + + Get details about a specific ML backend. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MlBackend] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlBackend, + parse_obj_as( + type_=MlBackend, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Remove an existing ML backend connection. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + url: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + is_interactive: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + auth_method: typing.Optional[MlUpdateRequestAuthMethod] = OMIT, + basic_auth_user: typing.Optional[str] = OMIT, + basic_auth_pass: typing.Optional[str] = OMIT, + extra_params: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + timeout: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[MlUpdateResponse]: + """ + + Update the ML backend parameters. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + For more information, see [Machine learning integration](https://labelstud.io/guide/ml). + + Parameters + ---------- + id : int + A unique integer value identifying this ml backend. 
+ + url : typing.Optional[str] + ML backend URL + + project : typing.Optional[int] + Project ID + + is_interactive : typing.Optional[bool] + Is interactive + + title : typing.Optional[str] + Title + + description : typing.Optional[str] + Description + + auth_method : typing.Optional[MlUpdateRequestAuthMethod] + Auth method + + basic_auth_user : typing.Optional[str] + Basic auth user + + basic_auth_pass : typing.Optional[str] + Basic auth password + + extra_params : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Extra parameters + + timeout : typing.Optional[int] + Response model timeout + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[MlUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}", + method="PATCH", + json={ + "url": url, + "project": project, + "is_interactive": is_interactive, + "title": title, + "description": description, + "auth_method": auth_method, + "basic_auth_user": basic_auth_user, + "basic_auth_pass": basic_auth_pass, + "extra_params": extra_params, + "timeout": timeout, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MlUpdateResponse, + parse_obj_as( + type_=MlUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def predict_interactive( + self, + id: int, + *, + task: int, + context: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Enable interactive pre-annotations for a specific task. + + ML-assisted labeling with interactive pre-annotations works with image segmentation and object detection tasks using rectangles, ellipses, polygons, brush masks, and keypoints, as well as with HTML and text named entity recognition tasks. Your ML backend must support the type of labeling that you’re performing, recognize the input that you create, and be able to respond with the relevant output for a prediction. For more information, see [Interactive pre-annotations](https://labelstud.io/guide/ml.html#Interactive-pre-annotations). + + Before you can use interactive annotations, it must be enabled for you ML backend connection (`"is_interactive": true`). + + You will need the task ID and the ML backend connection ID. The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](../tasks/list). The ML backend connection ID is available via [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + task : int + ID of task to annotate + + context : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Context for ML model + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/interactive-annotating", + method="POST", + json={ + "task": task, + "context": context, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def train( + self, + id: int, + *, + use_ground_truth: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + After you connect a model to Label Studio as a machine learning backend and annotate at least one task, you can start training the model. Training logs appear in stdout and the console. + + For more information, see [Model training](https://labelstud.io/guide/ml.html#Model-training). + + You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : int + A unique integer value identifying this ML backend. + + use_ground_truth : typing.Optional[bool] + Whether to include ground truth annotations in training + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/train", + method="POST", + json={ + "use_ground_truth": use_ground_truth, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + if _response.status_code == 500: + raise InternalServerError( + headers=dict(_response.headers), + body=typing.cast( + str, + parse_obj_as( + type_=str, # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list_model_versions( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Get available versions of the model. You will need to specify an ID for the backend connection. You can find this using [List ML backends](list). + + Parameters + ---------- + id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/ml/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/ml/types/__init__.py b/src/label_studio_sdk/ml/types/__init__.py index b308dc9d2..06b8b419d 100644 --- a/src/label_studio_sdk/ml/types/__init__.py +++ b/src/label_studio_sdk/ml/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .ml_create_request_auth_method import MlCreateRequestAuthMethod from .ml_create_response import MlCreateResponse from .ml_create_response_auth_method import MlCreateResponseAuthMethod diff --git a/src/label_studio_sdk/ml/types/ml_create_response.py b/src/label_studio_sdk/ml/types/ml_create_response.py index 030fa3d3c..abb00160f 100644 --- a/src/label_studio_sdk/ml/types/ml_create_response.py +++ b/src/label_studio_sdk/ml/types/ml_create_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_create_response_auth_method import MlCreateResponseAuthMethod -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class MlCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/ml/types/ml_update_response.py b/src/label_studio_sdk/ml/types/ml_update_response.py index f23e5dadd..5220c5bce 100644 --- a/src/label_studio_sdk/ml/types/ml_update_response.py +++ b/src/label_studio_sdk/ml/types/ml_update_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_update_response_auth_method import MlUpdateResponseAuthMethod -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class MlUpdateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/model_providers/__init__.py b/src/label_studio_sdk/model_providers/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/model_providers/__init__.py +++ b/src/label_studio_sdk/model_providers/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/model_providers/client.py b/src/label_studio_sdk/model_providers/client.py index 8707af4fd..c285b4e77 100644 --- a/src/label_studio_sdk/model_providers/client.py +++ b/src/label_studio_sdk/model_providers/client.py @@ -1,27 +1,17 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.model_provider_connection import ModelProviderConnection -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod +from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization from ..types.model_provider_connection_provider import ModelProviderConnectionProvider from ..types.model_provider_connection_scope import ModelProviderConnectionScope -from ..types.model_provider_connection_organization import ( - ModelProviderConnectionOrganization, -) -from ..types.model_provider_connection_created_by import ( - ModelProviderConnectionCreatedBy, -) -import datetime as dt -from ..types.model_provider_connection_budget_reset_period import ( - ModelProviderConnectionBudgetResetPeriod, -) -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawModelProvidersClient, RawModelProvidersClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -29,7 +19,18 @@ class ModelProvidersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawModelProvidersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawModelProvidersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawModelProvidersClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ModelProviderConnection]: """ @@ -54,24 +55,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.model_providers.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ModelProviderConnection], - parse_obj_as( - type_=typing.List[ModelProviderConnection], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -153,50 +138,25 @@ def create( provider="OpenAI", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="POST", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=ModelProviderConnectionOrganization, - direction="write", - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=ModelProviderConnectionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = self._raw_client.create( 
+ provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + budget_total_spent=budget_total_spent, + budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> ModelProviderConnection: """ @@ -226,24 +186,8 @@ def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = Non pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(pk, request_options=request_options) + return _response.data def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -272,18 +216,8 @@ def delete(self, pk: int, *, request_options: 
typing.Optional[RequestOptions] = pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(pk, request_options=request_options) + return _response.data def update( self, @@ -370,55 +304,42 @@ def update( provider="OpenAI", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="PATCH", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=ModelProviderConnectionOrganization, - direction="write", - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=ModelProviderConnectionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = self._raw_client.update( + pk, + provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + 
budget_total_spent=budget_total_spent, + budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncModelProvidersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawModelProvidersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawModelProvidersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawModelProvidersClient + """ + return self._raw_client async def list( self, *, request_options: typing.Optional[RequestOptions] = None @@ -453,24 +374,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ModelProviderConnection], - parse_obj_as( - type_=typing.List[ModelProviderConnection], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -560,50 +465,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - "api/model-provider-connections/", - method="POST", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=ModelProviderConnectionOrganization, - direction="write", - ), - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=ModelProviderConnectionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = await self._raw_client.create( + provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + budget_total_spent=budget_total_spent, + budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> ModelProviderConnection: """ @@ -641,24 +521,8 @@ async def main() -> 
None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(pk, request_options=request_options) + return _response.data async def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -695,18 +559,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(pk, request_options=request_options) + return _response.data async def update( self, @@ -801,47 +655,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/model-provider-connections/{jsonable_encoder(pk)}", - method="PATCH", - json={ - "provider": provider, - "api_key": api_key, - "deployment_name": deployment_name, - "endpoint": endpoint, - "scope": scope, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=ModelProviderConnectionOrganization, - direction="write", - ), - "created_by": 
convert_and_respect_annotation_metadata( - object_=created_by, - annotation=ModelProviderConnectionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "is_internal": is_internal, - "budget_limit": budget_limit, - "budget_last_reset_date": budget_last_reset_date, - "budget_reset_period": budget_reset_period, - "budget_total_spent": budget_total_spent, - "budget_alert_threshold": budget_alert_threshold, - }, + _response = await self._raw_client.update( + pk, + provider=provider, + api_key=api_key, + deployment_name=deployment_name, + endpoint=endpoint, + scope=scope, + organization=organization, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + is_internal=is_internal, + budget_limit=budget_limit, + budget_last_reset_date=budget_last_reset_date, + budget_reset_period=budget_reset_period, + budget_total_spent=budget_total_spent, + budget_alert_threshold=budget_alert_threshold, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ModelProviderConnection, - parse_obj_as( - type_=ModelProviderConnection, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/model_providers/raw_client.py b/src/label_studio_sdk/model_providers/raw_client.py new file mode 100644 index 000000000..e481717d1 --- /dev/null +++ b/src/label_studio_sdk/model_providers/raw_client.py @@ -0,0 +1,706 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.model_provider_connection import ModelProviderConnection +from ..types.model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod +from ..types.model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from ..types.model_provider_connection_organization import ModelProviderConnectionOrganization +from ..types.model_provider_connection_provider import ModelProviderConnectionProvider +from ..types.model_provider_connection_scope import ModelProviderConnectionScope + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawModelProvidersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ModelProviderConnection]]: + """ + Get all model provider connections created by the user in the current organization. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[ModelProviderConnection]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ModelProviderConnection]: + """ + Create a new model provider connection. 
+ + Parameters + ---------- + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ModelProviderConnection] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ModelProviderConnection]: + """ + Get a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ModelProviderConnection] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + pk: int, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ModelProviderConnection]: + """ + Update a model provider connection by ID. 
+ + Parameters + ---------- + pk : int + Model Provider Connection ID + + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ModelProviderConnection] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="PATCH", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawModelProvidersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[ModelProviderConnection]]: + """ + Get all model provider connections created by the user in the current organization. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[ModelProviderConnection]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ModelProviderConnection], + parse_obj_as( + type_=typing.List[ModelProviderConnection], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ModelProviderConnection]: + """ + Create a new model provider connection. 
+ + Parameters + ---------- + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ModelProviderConnection] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-provider-connections/", + method="POST", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ModelProviderConnection]: + """ + Get a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ModelProviderConnection] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a model provider connection by ID. + + Parameters + ---------- + pk : int + Model Provider Connection ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + pk: int, + *, + provider: ModelProviderConnectionProvider, + api_key: typing.Optional[str] = OMIT, + deployment_name: typing.Optional[str] = OMIT, + endpoint: typing.Optional[str] = OMIT, + scope: typing.Optional[ModelProviderConnectionScope] = OMIT, + organization: typing.Optional[ModelProviderConnectionOrganization] = OMIT, + created_by: typing.Optional[ModelProviderConnectionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + is_internal: typing.Optional[bool] = OMIT, + budget_limit: typing.Optional[float] = OMIT, + budget_last_reset_date: typing.Optional[dt.datetime] = OMIT, + budget_reset_period: typing.Optional[ModelProviderConnectionBudgetResetPeriod] = OMIT, + budget_total_spent: typing.Optional[float] = OMIT, + budget_alert_threshold: typing.Optional[float] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ModelProviderConnection]: + """ + Update a model provider connection by ID. 
+ + Parameters + ---------- + pk : int + Model Provider Connection ID + + provider : ModelProviderConnectionProvider + + api_key : typing.Optional[str] + + deployment_name : typing.Optional[str] + + endpoint : typing.Optional[str] + + scope : typing.Optional[ModelProviderConnectionScope] + + organization : typing.Optional[ModelProviderConnectionOrganization] + + created_by : typing.Optional[ModelProviderConnectionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + is_internal : typing.Optional[bool] + Whether the model provider connection is internal, not visible to the user. + + budget_limit : typing.Optional[float] + Budget limit for the model provider connection (null if unlimited) + + budget_last_reset_date : typing.Optional[dt.datetime] + Date and time the budget was last reset + + budget_reset_period : typing.Optional[ModelProviderConnectionBudgetResetPeriod] + Budget reset period for the model provider connection (null if not reset) + + budget_total_spent : typing.Optional[float] + Tracked total budget spent for the given provider connection within the current budget period + + budget_alert_threshold : typing.Optional[float] + Budget alert threshold for the given provider connection + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ModelProviderConnection] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/model-provider-connections/{jsonable_encoder(pk)}", + method="PATCH", + json={ + "provider": provider, + "api_key": api_key, + "deployment_name": deployment_name, + "endpoint": endpoint, + "scope": scope, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=ModelProviderConnectionOrganization, direction="write" + ), + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=ModelProviderConnectionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "is_internal": is_internal, + "budget_limit": budget_limit, + "budget_last_reset_date": budget_last_reset_date, + "budget_reset_period": budget_reset_period, + "budget_total_spent": budget_total_spent, + "budget_alert_threshold": budget_alert_threshold, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ModelProviderConnection, + parse_obj_as( + type_=ModelProviderConnection, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/predictions/__init__.py b/src/label_studio_sdk/predictions/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/predictions/__init__.py +++ b/src/label_studio_sdk/predictions/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + diff --git a/src/label_studio_sdk/predictions/client.py b/src/label_studio_sdk/predictions/client.py index f1f84734d..f8f94ee1d 100644 --- a/src/label_studio_sdk/predictions/client.py +++ b/src/label_studio_sdk/predictions/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.prediction import Prediction -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawPredictionsClient, RawPredictionsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class PredictionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawPredictionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPredictionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawPredictionsClient + """ + return self._raw_client def list( self, @@ -60,28 +68,8 @@ def list( ) client.predictions.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/predictions/", - method="GET", - params={ - "task": task, - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prediction], - parse_obj_as( - type_=typing.List[Prediction], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(task=task, project=project, request_options=request_options) + return _response.data def create( self, @@ -157,34 +145,10 @@ def create( model_version="yolo-v8", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/predictions/", - method="POST", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ @@ -217,24 +181,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = 
Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -264,18 +212,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -349,39 +287,26 @@ def update( model_version="yolo-v8", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update( + id, task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return 
typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPredictionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawPredictionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawPredictionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawPredictionsClient + """ + return self._raw_client async def list( self, @@ -433,28 +358,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/predictions/", - method="GET", - params={ - "task": task, - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prediction], - parse_obj_as( - type_=typing.List[Prediction], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(task=task, project=project, request_options=request_options) + return _response.data async def create( self, @@ -538,34 +443,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/predictions/", - method="POST", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - 
request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prediction: """ @@ -606,24 +487,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -661,18 +526,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -754,31 +609,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/predictions/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "task": task, - "result": result, - "score": score, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update( + id, task=task, result=result, score=score, model_version=model_version, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prediction, - parse_obj_as( - type_=Prediction, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/predictions/raw_client.py b/src/label_studio_sdk/predictions/raw_client.py new file mode 100644 index 000000000..7fc9c7107 --- /dev/null +++ b/src/label_studio_sdk/predictions/raw_client.py @@ -0,0 +1,573 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.prediction import Prediction + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawPredictionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + task: typing.Optional[int] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[Prediction]]: + """ + + Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). + + The terms "predictions" and pre-annotations" are used interchangeably. + + Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). + + To import predictions via the API, see [Create prediction](create). + + Parameters + ---------- + task : typing.Optional[int] + Filter predictions by task ID + + project : typing.Optional[int] + Filter predictions by project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[Prediction]] + Predictions list + """ + _response = self._client_wrapper.httpx_client.request( + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prediction]: + """ + + If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. + + To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. + + #### JSON format for predictions + Label Studio JSON format for pre-annotations must contain two sections: + * A data object which references the source of the data that the pre-annotations apply to. 
This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. + * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + + For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) + + Parameters + ---------- + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. + + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Prediction] + Created prediction + """ + _response = self._client_wrapper.httpx_client.request( + "api/predictions/", + method="POST", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Prediction]: + """ + + Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Prediction] + Prediction details + """ + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a prediction. To find the prediction ID, use [List predictions](list). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prediction]: + """ + + Update a prediction. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. 
+ + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prediction] + Updated prediction + """ + _response = self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawPredictionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + task: typing.Optional[int] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[Prediction]]: + """ + + Get a list of all predictions. You can optionally filter these by task or by project. If you want to filter, you will need the project ID and/or task ID. Both of these can be found in the Label Studio URL when viewing a task, or you can use [List all projects](../projects/list) and [Get tasks list](../tasks/list). + + The terms "predictions" and pre-annotations" are used interchangeably. 
+ + Predictions can be [imported directly into Label Studio](https://labelstud.io/guide/predictions) or [generated by a connected ML backend](https://labelstud.io/guide/ml.html#Pre-annotations-predictions). + + To import predictions via the API, see [Create prediction](create). + + Parameters + ---------- + task : typing.Optional[int] + Filter predictions by task ID + + project : typing.Optional[int] + Filter predictions by project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[Prediction]] + Predictions list + """ + _response = await self._client_wrapper.httpx_client.request( + "api/predictions/", + method="GET", + params={ + "task": task, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prediction], + parse_obj_as( + type_=typing.List[Prediction], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prediction]: + """ + + If you have predictions generated for your dataset from a model, either as pre-annotated tasks or pre-labeled tasks, you can import the predictions with your dataset into Label Studio for review and correction. 
+ + To import predicted labels into Label Studio, you must use the [Basic Label Studio JSON format](https://labelstud.io/guide/tasks#Basic-Label-Studio-JSON-format) and set up your tasks with the predictions JSON key. The Label Studio ML backend also outputs tasks in this format. + + #### JSON format for predictions + Label Studio JSON format for pre-annotations must contain two sections: + * A data object which references the source of the data that the pre-annotations apply to. This can be a URL to an audio file, a pre-signed cloud storage link to an image, plain text, a reference to a CSV file stored in Label Studio, or something else. + * A predictions array that contains the pre-annotation results for the different types of labeling. See how to add results to the predictions array. + + For more information, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations) + + Parameters + ---------- + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. + + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Prediction] + Created prediction + """ + _response = await self._client_wrapper.httpx_client.request( + "api/predictions/", + method="POST", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Prediction]: + """ + + Get details about a specific prediction by its ID. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Prediction] + Prediction details + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a prediction. To find the prediction ID, use [List predictions](list). + + Parameters + ---------- + id : int + Prediction ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + task: typing.Optional[int] = OMIT, + result: typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] = OMIT, + score: typing.Optional[float] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prediction]: + """ + + Update a prediction. To find the prediction ID, use [List predictions](list). + + For information about the prediction format, see [the JSON format reference in the Label Studio documentation](https://labelstud.io/guide/predictions#JSON-format-for-pre-annotations). + + Parameters + ---------- + id : int + Prediction ID + + task : typing.Optional[int] + Task ID for which the prediction is created + + result : typing.Optional[typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]]] + Prediction result in JSON format. Read more about the format in [the Label Studio documentation.](https://labelstud.io/guide/predictions) + + score : typing.Optional[float] + Prediction score. Can be used in Data Manager to sort task by model confidence. Task with the lowest score will be shown first. 
+ + model_version : typing.Optional[str] + Model version - tag for predictions that can be used to filter tasks in Data Manager, as well as select specific model version for showing preannotations in the labeling interface + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prediction] + Updated prediction + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/predictions/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "task": task, + "result": result, + "score": score, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prediction, + parse_obj_as( + type_=Prediction, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/__init__.py b/src/label_studio_sdk/projects/__init__.py index 024a38ecc..1cdd91f96 100644 --- a/src/label_studio_sdk/projects/__init__.py +++ b/src/label_studio_sdk/projects/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ProjectsCreateResponse, ProjectsImportTasksResponse, ProjectsListResponse, ProjectsUpdateResponse from . 
import exports, pauses from .exports import ExportsConvertResponse, ExportsListFormatsResponseItem diff --git a/src/label_studio_sdk/projects/client.py b/src/label_studio_sdk/projects/client.py index de39e41c6..e0545b5bf 100644 --- a/src/label_studio_sdk/projects/client.py +++ b/src/label_studio_sdk/projects/client.py @@ -1,26 +1,18 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper -from .pauses.client import PausesClient -from .exports.client import ExportsClient + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.pagination import AsyncPager, SyncPager from ..core.request_options import RequestOptions -from ..core.pagination import SyncPager from ..types.project import Project -from .types.projects_list_response import ProjectsListResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from ..types.project_label_config import ProjectLabelConfig +from .exports.client import AsyncExportsClient, ExportsClient +from .pauses.client import AsyncPausesClient, PausesClient +from .raw_client import AsyncRawProjectsClient, RawProjectsClient from .types.projects_create_response import ProjectsCreateResponse -from ..core.jsonable_encoder import jsonable_encoder -from .types.projects_update_response import ProjectsUpdateResponse from .types.projects_import_tasks_response import ProjectsImportTasksResponse -from ..errors.bad_request_error import BadRequestError -from ..types.project_label_config import ProjectLabelConfig -from ..core.client_wrapper import AsyncClientWrapper -from .pauses.client import AsyncPausesClient -from .exports.client import AsyncExportsClient -from ..core.pagination import AsyncPager +from .types.projects_update_response import ProjectsUpdateResponse # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -28,9 +20,21 @@ class ProjectsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.pauses = PausesClient(client_wrapper=self._client_wrapper) - self.exports = ExportsClient(client_wrapper=self._client_wrapper) + self._raw_client = RawProjectsClient(client_wrapper=client_wrapper) + self.pauses = PausesClient(client_wrapper=client_wrapper) + + self.exports = ExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawProjectsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawProjectsClient + """ + return self._raw_client def list( self, @@ -97,45 +101,15 @@ def list( for page in response.iter_pages(): yield page """ - page = page if page is not None else 1 - _response = self._client_wrapper.httpx_client.request( - "api/projects/", - method="GET", - params={ - "ordering": ordering, - "ids": ids, - "title": title, - "page": page, - "page_size": page_size, - "workspaces": workspaces, - }, + return self._raw_client.list( + ordering=ordering, + ids=ids, + title=title, + page=page, + page_size=page_size, + workspaces=workspaces, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - ProjectsListResponse, - parse_obj_as( - type_=ProjectsListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - workspaces=workspaces, - request_options=request_options, - ) - _items = _parsed_response.results - return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -207,7 +181,7 @@ 
def create( Project color in HEX format control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] Workspace ID @@ -232,45 +206,25 @@ def create( ) client.projects.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/projects/", - method="POST", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ 
- "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsCreateResponse, - parse_obj_as( - type_=ProjectsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Project: """ @@ -300,24 +254,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Project, - parse_obj_as( - type_=Project, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, 
request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -349,18 +287,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -440,7 +368,7 @@ def update( Project color in HEX format control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] Workspace ID @@ -467,45 +395,26 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsUpdateResponse, - parse_obj_as( - 
type_=ProjectsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def import_tasks( self, @@ -518,84 +427,80 @@ def import_tasks( request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsImportTasksResponse: """ - + Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited at 250K tasks and 200 MB. - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + - Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. - + Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. - + There are three possible ways to import tasks with this endpoint: - - #### 1\. **POST with data** + + #### 1. **POST with data** Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. 
- + Update this example to specify your authorization token and Label Studio instance host, then run the following from the command line: - + ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' ``` - - #### 2\. **POST with files** + + #### 2. **POST with files** Send tasks as files. You can attach multiple files with different names. - + - **JSON**: text files in JavaScript object notation format - **CSV**: text files with tables in Comma Separated Values format - **TSV**: text files with tables in Tab Separated Value format - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only - + Update this example to specify your authorization token, Label Studio instance host, and file name and path, then run the following from the command line: - + ```bash - curl -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' -F ‘file=@path/to/my_file.csv’ + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' ``` - - #### 3\. **POST with URL** + + #### 3. **POST with URL** You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. 
- + ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' \ - --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' ``` - +
- + Parameters ---------- id : int A unique integer value identifying this project. - + request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] - + commit_to_project : typing.Optional[bool] Set to "true" to immediately commit tasks to the project. - + return_task_ids : typing.Optional[bool] Set to "true" to return task IDs in the response. - + preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannoted_from_fields=["prediction"]`. - + request_options : typing.Optional[RequestOptions] Request-specific configuration. - + Returns ------- ProjectsImportTasksResponse Tasks successfully imported - + Examples -------- from label_studio_sdk import LabelStudio - + client = LabelStudio( api_key="YOUR_API_KEY", ) @@ -604,48 +509,18 @@ def import_tasks( request=[{"key": "value"}], ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/import", - method="POST", - params={ - "commit_to_project": commit_to_project, - "return_task_ids": return_task_ids, - "preannotated_from_fields": preannotated_from_fields, - }, - json=request, + _response = self._raw_client.import_tasks( + id, + request=request, + commit_to_project=commit_to_project, + return_task_ids=return_task_ids, + preannotated_from_fields=preannotated_from_fields, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsImportTasksResponse, - parse_obj_as( - type_=ProjectsImportTasksResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: 
ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def validate_config( - self, - id: int, - *, - label_config: str, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None ) -> ProjectLabelConfig: """ @@ -681,35 +556,27 @@ def validate_config( label_config="label_config", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/validate/", - method="POST", - json={ - "label_config": label_config, - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectLabelConfig, - parse_obj_as( - type_=ProjectLabelConfig, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.validate_config(id, label_config=label_config, request_options=request_options) + return _response.data class AsyncProjectsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.pauses = AsyncPausesClient(client_wrapper=self._client_wrapper) - self.exports = AsyncExportsClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawProjectsClient(client_wrapper=client_wrapper) + self.pauses = AsyncPausesClient(client_wrapper=client_wrapper) + + self.exports = AsyncExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawProjectsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawProjectsClient + """ + return self._raw_client async def list( self, @@ -777,6 +644,7 @@ async def main() -> None: response = await client.projects.list() async for item in response: yield item + # alternatively, you can paginate page-by-page async for page in response.iter_pages(): yield page @@ -784,45 +652,15 @@ async def main() -> None: asyncio.run(main()) """ - page = page if page is not None else 1 - _response = await self._client_wrapper.httpx_client.request( - "api/projects/", - method="GET", - params={ - "ordering": ordering, - "ids": ids, - "title": title, - "page": page, - "page_size": page_size, - "workspaces": workspaces, - }, + return await self._raw_client.list( + ordering=ordering, + ids=ids, + title=title, + page=page, + page_size=page_size, + workspaces=workspaces, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - ProjectsListResponse, - parse_obj_as( - type_=ProjectsListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - ordering=ordering, - ids=ids, - title=title, - page=page + 1, - page_size=page_size, - workspaces=workspaces, - request_options=request_options, - ) - _items = _parsed_response.results - return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -894,7 +732,7 @@ async def create( Project color in HEX format control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] Workspace ID @@ -927,45 +765,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/projects/", - method="POST", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + 
enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsCreateResponse, - parse_obj_as( - type_=ProjectsCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Project: """ @@ -1003,24 +821,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Project, - parse_obj_as( - type_=Project, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -1060,18 +862,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, 
- ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1151,7 +943,7 @@ async def update( Project color in HEX format control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} workspace : typing.Optional[int] Workspace ID @@ -1186,45 +978,26 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "title": title, - "description": description, - "label_config": label_config, - "expert_instruction": expert_instruction, - "show_instruction": show_instruction, - "show_skip_button": show_skip_button, - "enable_empty_annotation": enable_empty_annotation, - "show_annotation_history": show_annotation_history, - "reveal_preannotations_interactively": reveal_preannotations_interactively, - "show_collab_predictions": show_collab_predictions, - "maximum_annotations": maximum_annotations, - "color": color, - "control_weights": control_weights, - "workspace": workspace, - "model_version": model_version, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + description=description, + label_config=label_config, + expert_instruction=expert_instruction, + show_instruction=show_instruction, + show_skip_button=show_skip_button, + enable_empty_annotation=enable_empty_annotation, + show_annotation_history=show_annotation_history, + reveal_preannotations_interactively=reveal_preannotations_interactively, + show_collab_predictions=show_collab_predictions, + maximum_annotations=maximum_annotations, + color=color, + control_weights=control_weights, + workspace=workspace, + model_version=model_version, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - 
ProjectsUpdateResponse, - parse_obj_as( - type_=ProjectsUpdateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def import_tasks( self, @@ -1237,142 +1010,108 @@ async def import_tasks( request_options: typing.Optional[RequestOptions] = None, ) -> ProjectsImportTasksResponse: """ - + Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited at 250K tasks and 200 MB. - The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). - + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + - Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. - + Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. - + There are three possible ways to import tasks with this endpoint: - - #### 1\. **POST with data** + + #### 1. **POST with data** Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. 
- + Update this example to specify your authorization token and Label Studio instance host, then run the following from the command line: - + ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' ``` - - #### 2\. **POST with files** + + #### 2. **POST with files** Send tasks as files. You can attach multiple files with different names. - + - **JSON**: text files in JavaScript object notation format - **CSV**: text files with tables in Comma Separated Values format - **TSV**: text files with tables in Tab Separated Value format - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only - + Update this example to specify your authorization token, Label Studio instance host, and file name and path, then run the following from the command line: - + ```bash - curl -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' -F ‘file=@path/to/my_file.csv’ + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' ``` - - #### 3\. **POST with URL** + + #### 3. **POST with URL** You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. 
- + ```bash - curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' \ - -X POST 'https://localhost:8080/api/projects/1/import' \ - --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' ``` - +
- + Parameters ---------- id : int A unique integer value identifying this project. - + request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] - + commit_to_project : typing.Optional[bool] Set to "true" to immediately commit tasks to the project. - + return_task_ids : typing.Optional[bool] Set to "true" to return task IDs in the response. - + preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannoted_from_fields=["prediction"]`. - + request_options : typing.Optional[RequestOptions] Request-specific configuration. - + Returns ------- ProjectsImportTasksResponse Tasks successfully imported - + Examples -------- import asyncio - + from label_studio_sdk import AsyncLabelStudio - + client = AsyncLabelStudio( api_key="YOUR_API_KEY", ) - - + + async def main() -> None: await client.projects.import_tasks( id=1, request=[{"key": "value"}], ) - - + + asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/import", - method="POST", - params={ - "commit_to_project": commit_to_project, - "return_task_ids": return_task_ids, - "preannotated_from_fields": preannotated_from_fields, - }, - json=request, + _response = await self._raw_client.import_tasks( + id, + request=request, + commit_to_project=commit_to_project, + return_task_ids=return_task_ids, + preannotated_from_fields=preannotated_from_fields, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectsImportTasksResponse, - parse_obj_as( - type_=ProjectsImportTasksResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - 
typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def validate_config( - self, - id: int, - *, - label_config: str, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None ) -> ProjectLabelConfig: """ @@ -1416,25 +1155,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/validate/", - method="POST", - json={ - "label_config": label_config, - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.validate_config( + id, label_config=label_config, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectLabelConfig, - parse_obj_as( - type_=ProjectLabelConfig, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/projects/exports/__init__.py b/src/label_studio_sdk/projects/exports/__init__.py index e251c825b..8366b6166 100644 --- a/src/label_studio_sdk/projects/exports/__init__.py +++ b/src/label_studio_sdk/projects/exports/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import ExportsConvertResponse, ExportsListFormatsResponseItem __all__ = ["ExportsConvertResponse", "ExportsListFormatsResponseItem"] diff --git a/src/label_studio_sdk/projects/exports/client.py b/src/label_studio_sdk/projects/exports/client.py index 1dcfcc35d..04f2cc5f4 100644 --- a/src/label_studio_sdk/projects/exports/client.py +++ b/src/label_studio_sdk/projects/exports/client.py @@ -1,26 +1,22 @@ # This file was auto-generated by Fern from our API Definition. +import datetime as dt import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions -from ...core.jsonable_encoder import jsonable_encoder -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem -from ...core.pydantic_utilities import parse_obj_as +from ...types.annotation_filter_options import AnnotationFilterOptions +from ...types.converted_format import ConvertedFormat from ...types.export import Export -from ...types.user_simple import UserSimple -import datetime as dt +from ...types.export_format import ExportFormat +from ...types.export_snapshot import ExportSnapshot from ...types.export_snapshot_status import ExportSnapshotStatus -from ...types.converted_format import ConvertedFormat -from ...types.task_filter_options import TaskFilterOptions -from ...types.annotation_filter_options import AnnotationFilterOptions from ...types.serialization_options import SerializationOptions -from ...types.export_snapshot import ExportSnapshot -from ...core.serialization import convert_and_respect_annotation_metadata -from ...types.export_format import ExportFormat +from ...types.task_filter_options import TaskFilterOptions +from ...types.user_simple import UserSimple +from .raw_client import AsyncRawExportsClient, RawExportsClient from 
.types.exports_convert_response import ExportsConvertResponse -from ...core.client_wrapper import AsyncClientWrapper +from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -28,7 +24,18 @@ class ExportsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawExportsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawExportsClient + """ + return self._raw_client def download_sync( self, @@ -55,7 +62,7 @@ def download_sync( ``` To export specific tasks with IDs of 123 and 345, run the following from the command line: ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' + curl -X GET "https://localhost:8080/api/projects/{id}/export?ids[]=123&ids[]=345" -H 'Authorization: Token abc123' --output 'annotations.json' ``` You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -83,33 +90,20 @@ def download_sync( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Yields - ------ + Returns + ------- typing.Iterator[bytes] Exported data in binary format """ - with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(id)}/export", - method="GET", - params={ - "export_type": export_type, - "download_all_tasks": download_all_tasks, - "download_resources": download_resources, - "ids": ids, - }, + with self._raw_client.download_sync( + id, + export_type=export_type, + download_all_tasks=download_all_tasks, + download_resources=download_resources, + ids=ids, request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - for _chunk in _response.iter_bytes(chunk_size=_chunk_size): - yield _chunk - return - _response.read() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + ) as r: + yield from r.data def list_formats( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -144,31 +138,10 @@ def list_formats( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportsListFormatsResponseItem], - parse_obj_as( - type_=typing.List[ExportsListFormatsResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def list( - self, - project_id: int, - *, - request_options: typing.Optional[RequestOptions] = None, - ) -> typing.List[Export]: + _response = 
self._raw_client.list_formats(id, request_options=request_options) + return _response.data + + def list(self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Export]: """ Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -199,24 +172,8 @@ def list( project_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Export], - parse_obj_as( - type_=typing.List[Export], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project_id, request_options=request_options) + return _response.data def create( self, @@ -294,57 +251,23 @@ def create( project_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="POST", - json={ - "title": title, - "id": id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=UserSimple, direction="write" - ), - "created_at": created_at, - "finished_at": finished_at, - "status": status, - "md5": md5, - "counters": counters, - "converted_formats": convert_and_respect_annotation_metadata( - object_=converted_formats, - annotation=typing.Sequence[ConvertedFormat], - direction="write", - ), - "task_filter_options": convert_and_respect_annotation_metadata( - object_=task_filter_options, - annotation=TaskFilterOptions, - direction="write", - ), - "annotation_filter_options": 
convert_and_respect_annotation_metadata( - object_=annotation_filter_options, - annotation=AnnotationFilterOptions, - direction="write", - ), - "serialization_options": convert_and_respect_annotation_metadata( - object_=serialization_options, - annotation=SerializationOptions, - direction="write", - ), - }, + _response = self._raw_client.create( + project_id, + title=title, + id=id, + created_by=created_by, + created_at=created_at, + finished_at=finished_at, + status=status, + md5=md5, + counters=counters, + converted_formats=converted_formats, + task_filter_options=task_filter_options, + annotation_filter_options=annotation_filter_options, + serialization_options=serialization_options, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportSnapshot, - parse_obj_as( - type_=ExportSnapshot, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def download( self, @@ -376,37 +299,18 @@ def download( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Yields - ------ + Returns + ------- typing.Iterator[bytes] Exported data in binary format """ - with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", - method="GET", - params={ - "exportType": export_type, - }, - request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - for _chunk in _response.iter_bytes(chunk_size=_chunk_size): - yield _chunk - return - _response.read() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + with self._raw_client.download( + project_id, export_pk, export_type=export_type, request_options=request_options + ) as r: + yield from r.data def get( - self, - project_id: int, - export_pk: str, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> Export: """ @@ -444,31 +348,11 @@ def get( export_pk="export_pk", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Export, - parse_obj_as( - type_=Export, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(project_id, export_pk, request_options=request_options) + return _response.data def delete( - self, - project_id: int, - export_pk: str, 
- *, - request_options: typing.Optional[RequestOptions] = None, + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> None: """ @@ -503,18 +387,8 @@ def delete( export_pk="export_pk", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(project_id, export_pk, request_options=request_options) + return _response.data def convert( self, @@ -568,37 +442,30 @@ def convert( export_pk="export_pk", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", - method="POST", - json={ - "export_type": export_type, - "download_resources": download_resources, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.convert( + project_id, + export_pk, + export_type=export_type, + download_resources=download_resources, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportsConvertResponse, - parse_obj_as( - type_=ExportsConvertResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncExportsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = 
AsyncRawExportsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawExportsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawExportsClient + """ + return self._raw_client async def download_sync( self, @@ -625,7 +492,7 @@ async def download_sync( ``` To export specific tasks with IDs of 123 and 345, run the following from the command line: ```bash - curl -X GET https://localhost:8080/api/projects/{id}/export?ids[]=123\&ids[]=345 -H 'Authorization: Token abc123' --output 'annotations.json' + curl -X GET "https://localhost:8080/api/projects/{id}/export?ids[]=123&ids[]=345" -H 'Authorization: Token abc123' --output 'annotations.json' ``` You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). @@ -653,33 +520,21 @@ async def download_sync( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Yields - ------ + Returns + ------- typing.AsyncIterator[bytes] Exported data in binary format """ - async with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(id)}/export", - method="GET", - params={ - "export_type": export_type, - "download_all_tasks": download_all_tasks, - "download_resources": download_resources, - "ids": ids, - }, + async with self._raw_client.download_sync( + id, + export_type=export_type, + download_all_tasks=download_all_tasks, + download_resources=download_resources, + ids=ids, request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size): - yield _chunk - return - await _response.aread() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + ) as r: + async for _chunk in r.data: + yield _chunk async def list_formats( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -722,30 +577,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/export/formats", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ExportsListFormatsResponseItem], - parse_obj_as( - type_=typing.List[ExportsListFormatsResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list_formats(id, request_options=request_options) + 
return _response.data async def list( - self, - project_id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Export]: """ @@ -785,24 +621,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Export], - parse_obj_as( - type_=typing.List[Export], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project_id, request_options=request_options) + return _response.data async def create( self, @@ -888,57 +708,23 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports", - method="POST", - json={ - "title": title, - "id": id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=UserSimple, direction="write" - ), - "created_at": created_at, - "finished_at": finished_at, - "status": status, - "md5": md5, - "counters": counters, - "converted_formats": convert_and_respect_annotation_metadata( - object_=converted_formats, - annotation=typing.Sequence[ConvertedFormat], - direction="write", - ), - "task_filter_options": convert_and_respect_annotation_metadata( - object_=task_filter_options, - annotation=TaskFilterOptions, - direction="write", - ), - "annotation_filter_options": convert_and_respect_annotation_metadata( - object_=annotation_filter_options, - annotation=AnnotationFilterOptions, - 
direction="write", - ), - "serialization_options": convert_and_respect_annotation_metadata( - object_=serialization_options, - annotation=SerializationOptions, - direction="write", - ), - }, + _response = await self._raw_client.create( + project_id, + title=title, + id=id, + created_by=created_by, + created_at=created_at, + finished_at=finished_at, + status=status, + md5=md5, + counters=counters, + converted_formats=converted_formats, + task_filter_options=task_filter_options, + annotation_filter_options=annotation_filter_options, + serialization_options=serialization_options, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportSnapshot, - parse_obj_as( - type_=ExportSnapshot, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def download( self, @@ -970,37 +756,19 @@ async def download( request_options : typing.Optional[RequestOptions] Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
- Yields - ------ + Returns + ------- typing.AsyncIterator[bytes] Exported data in binary format """ - async with self._client_wrapper.httpx_client.stream( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", - method="GET", - params={ - "exportType": export_type, - }, - request_options=request_options, - ) as _response: - try: - if 200 <= _response.status_code < 300: - _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None - async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size): - yield _chunk - return - await _response.aread() - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + async with self._raw_client.download( + project_id, export_pk, export_type=export_type, request_options=request_options + ) as r: + async for _chunk in r.data: + yield _chunk async def get( - self, - project_id: int, - export_pk: str, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> Export: """ @@ -1046,31 +814,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Export, - parse_obj_as( - type_=Export, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(project_id, export_pk, 
request_options=request_options) + return _response.data async def delete( - self, - project_id: int, - export_pk: str, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> None: """ @@ -1113,18 +861,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(project_id, export_pk, request_options=request_options) + return _response.data async def convert( self, @@ -1186,29 +924,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", - method="POST", - json={ - "export_type": export_type, - "download_resources": download_resources, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.convert( + project_id, + export_pk, + export_type=export_type, + download_resources=download_resources, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ExportsConvertResponse, - parse_obj_as( - type_=ExportsConvertResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return 
_response.data diff --git a/src/label_studio_sdk/projects/exports/raw_client.py b/src/label_studio_sdk/projects/exports/raw_client.py new file mode 100644 index 000000000..f446a02ec --- /dev/null +++ b/src/label_studio_sdk/projects/exports/raw_client.py @@ -0,0 +1,1038 @@ +# This file was auto-generated by Fern from our API Definition. + +import contextlib +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.annotation_filter_options import AnnotationFilterOptions +from ...types.converted_format import ConvertedFormat +from ...types.export import Export +from ...types.export_format import ExportFormat +from ...types.export_snapshot import ExportSnapshot +from ...types.export_snapshot_status import ExportSnapshotStatus +from ...types.serialization_options import SerializationOptions +from ...types.task_filter_options import TaskFilterOptions +from ...types.user_simple import UserSimple +from .types.exports_convert_response import ExportsConvertResponse +from .types.exports_list_formats_response_item import ExportsListFormatsResponseItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawExportsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + @contextlib.contextmanager + def download_sync( + self, + id: int, + *, + export_type: typing.Optional[str] = None, + download_all_tasks: typing.Optional[bool] = None, + download_resources: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.Iterator[HttpResponse[typing.Iterator[bytes]]]: + """ + + If you have a large project it's recommended to use export snapshots, this easy export endpoint might have timeouts. + Export annotated tasks as a file in a specific format. + For example, to export JSON annotations for a project to a file called `annotations.json`, + run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export all tasks, including skipped tasks and others without annotations, run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export specific tasks with IDs of 123 and 345, run the following from the command line: + ```bash + curl -X GET "https://localhost:8080/api/projects/{id}/export?ids[]=123&ids[]=345" -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. 
+ + export_type : typing.Optional[str] + Selected export format (JSON by default) + + download_all_tasks : typing.Optional[bool] + + If true, download all tasks regardless of status. If false, download only annotated tasks. + + download_resources : typing.Optional[bool] + + If true, download all resource files such as images, audio, and others relevant to the tasks. + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + + Specify a list of task IDs to retrieve only the details for those tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. + + Returns + ------- + typing.Iterator[HttpResponse[typing.Iterator[bytes]]] + Exported data in binary format + """ + with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(id)}/export", + method="GET", + params={ + "export_type": export_type, + "download_all_tasks": download_all_tasks, + "download_resources": download_resources, + "ids": ids, + }, + request_options=request_options, + ) as _response: + + def _stream() -> HttpResponse[typing.Iterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return HttpResponse( + response=_response, data=(_chunk for _chunk in _response.iter_bytes(chunk_size=_chunk_size)) + ) + _response.read() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield _stream() + + def list_formats( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ExportsListFormatsResponseItem]]: + """ + + Before exporting annotations, you can check 
with formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[ExportsListFormatsResponseItem]] + Export formats + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportsListFormatsResponseItem], + parse_obj_as( + type_=typing.List[ExportsListFormatsResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[Export]]: + """ + + Returns a list of export file (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Included in the response is information about each snapshot, such as who created it and what format it is in. 
+ + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Export]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + project_id: int, + *, + title: typing.Optional[str] = OMIT, + id: typing.Optional[int] = OMIT, + created_by: typing.Optional[UserSimple] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + finished_at: typing.Optional[dt.datetime] = OMIT, + status: typing.Optional[ExportSnapshotStatus] = OMIT, + md5: typing.Optional[str] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, + task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, + annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, + serialization_options: typing.Optional[SerializationOptions] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ExportSnapshot]: + """ + + Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. + + For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + title : typing.Optional[str] + + id : typing.Optional[int] + + created_by : typing.Optional[UserSimple] + + created_at : typing.Optional[dt.datetime] + Creation time + + finished_at : typing.Optional[dt.datetime] + Complete or fail time + + status : typing.Optional[ExportSnapshotStatus] + + md5 : typing.Optional[str] + + counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] + + task_filter_options : typing.Optional[TaskFilterOptions] + + annotation_filter_options : typing.Optional[AnnotationFilterOptions] + + serialization_options : typing.Optional[SerializationOptions] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ExportSnapshot] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="POST", + json={ + "title": title, + "id": id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), + "created_at": created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, annotation=TaskFilterOptions, direction="write" + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, annotation=SerializationOptions, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportSnapshot, + parse_obj_as( + type_=ExportSnapshot, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + @contextlib.contextmanager + def download( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
typing.Iterator[HttpResponse[typing.Iterator[bytes]]]: + """ + + Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + Selected export format. JSON is available by default. For other formats, you need to convert the export first. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
+ + Returns + ------- + typing.Iterator[HttpResponse[typing.Iterator[bytes]]] + Exported data in binary format + """ + with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", + method="GET", + params={ + "exportType": export_type, + }, + request_options=request_options, + ) as _response: + + def _stream() -> HttpResponse[typing.Iterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return HttpResponse( + response=_response, data=(_chunk for _chunk in _response.iter_bytes(chunk_size=_chunk_size)) + ) + _response.read() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield _stream() + + def get( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[Export]: + """ + + Retrieve information about a specific export file (snapshot). + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Export] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete an export file by specified export ID. + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def convert( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = OMIT, + download_resources: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ExportsConvertResponse]: + """ + + You can use this to convert an export snapshot into the selected format. + + To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + + download_resources : typing.Optional[bool] + If true, download all resource files such as images, audio, and others relevant to the tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ExportsConvertResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", + method="POST", + json={ + "export_type": export_type, + "download_resources": download_resources, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportsConvertResponse, + parse_obj_as( + type_=ExportsConvertResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawExportsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + @contextlib.asynccontextmanager + async def download_sync( + self, + id: int, + *, + export_type: typing.Optional[str] = None, + download_all_tasks: typing.Optional[bool] = None, + download_resources: typing.Optional[bool] = None, + ids: typing.Optional[typing.Union[int, typing.Sequence[int]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]: + """ + + If you have a large project it's recommended to use export snapshots, this easy export endpoint might have timeouts. + Export annotated tasks as a file in a specific format. 
+ For example, to export JSON annotations for a project to a file called `annotations.json`, + run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export all tasks, including skipped tasks and others without annotations, run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/{id}/export?exportType=JSON&download_all_tasks=true -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + To export specific tasks with IDs of 123 and 345, run the following from the command line: + ```bash + curl -X GET "https://localhost:8080/api/projects/{id}/export?ids[]=123&ids[]=345" -H 'Authorization: Token abc123' --output 'annotations.json' + ``` + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + export_type : typing.Optional[str] + Selected export format (JSON by default) + + download_all_tasks : typing.Optional[bool] + + If true, download all tasks regardless of status. If false, download only annotated tasks. + + download_resources : typing.Optional[bool] + + If true, download all resource files such as images, audio, and others relevant to the tasks. + + ids : typing.Optional[typing.Union[int, typing.Sequence[int]]] + + Specify a list of task IDs to retrieve only the details for those tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
+ + Returns + ------- + typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]] + Exported data in binary format + """ + async with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(id)}/export", + method="GET", + params={ + "export_type": export_type, + "download_all_tasks": download_all_tasks, + "download_resources": download_resources, + "ids": ids, + }, + request_options=request_options, + ) as _response: + + async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return AsyncHttpResponse( + response=_response, + data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)), + ) + await _response.aread() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield await _stream() + + async def list_formats( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[ExportsListFormatsResponseItem]]: + """ + + Before exporting annotations, you can check which formats are supported by the specified project. For more information about export formats, see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You must provide a project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[ExportsListFormatsResponseItem]] + Export formats + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/export/formats", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ExportsListFormatsResponseItem], + parse_obj_as( + type_=typing.List[ExportsListFormatsResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, project_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Export]]: + """ + + Returns a list of export files (snapshots) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Included in the response is information about each snapshot, such as who created it and what format it is in. + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Export]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Export], + parse_obj_as( + type_=typing.List[Export], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + project_id: int, + *, + title: typing.Optional[str] = OMIT, + id: typing.Optional[int] = OMIT, + created_by: typing.Optional[UserSimple] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + finished_at: typing.Optional[dt.datetime] = OMIT, + status: typing.Optional[ExportSnapshotStatus] = OMIT, + md5: typing.Optional[str] = OMIT, + counters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + converted_formats: typing.Optional[typing.Sequence[ConvertedFormat]] = OMIT, + task_filter_options: typing.Optional[TaskFilterOptions] = OMIT, + annotation_filter_options: typing.Optional[AnnotationFilterOptions] = OMIT, + serialization_options: typing.Optional[SerializationOptions] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ExportSnapshot]: + """ + + Create a new export request to start a background task and generate an export file (snapshot) for a specific project by ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). 
+ + A snapshot is a static export of your project's data and annotations at a specific point in time. It captures the current state of your tasks, annotations, and other relevant data, allowing you to download and review them later. Snapshots are particularly useful for large projects as they help avoid timeouts during export operations by processing the data asynchronously. + + For more information, see the [Label Studio documentation on exporting annotations](https://labelstud.io/guide/export.html). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + title : typing.Optional[str] + + id : typing.Optional[int] + + created_by : typing.Optional[UserSimple] + + created_at : typing.Optional[dt.datetime] + Creation time + + finished_at : typing.Optional[dt.datetime] + Complete or fail time + + status : typing.Optional[ExportSnapshotStatus] + + md5 : typing.Optional[str] + + counters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + + converted_formats : typing.Optional[typing.Sequence[ConvertedFormat]] + + task_filter_options : typing.Optional[TaskFilterOptions] + + annotation_filter_options : typing.Optional[AnnotationFilterOptions] + + serialization_options : typing.Optional[SerializationOptions] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ExportSnapshot] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports", + method="POST", + json={ + "title": title, + "id": id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=UserSimple, direction="write" + ), + "created_at": created_at, + "finished_at": finished_at, + "status": status, + "md5": md5, + "counters": counters, + "converted_formats": convert_and_respect_annotation_metadata( + object_=converted_formats, annotation=typing.Sequence[ConvertedFormat], direction="write" + ), + "task_filter_options": convert_and_respect_annotation_metadata( + object_=task_filter_options, annotation=TaskFilterOptions, direction="write" + ), + "annotation_filter_options": convert_and_respect_annotation_metadata( + object_=annotation_filter_options, annotation=AnnotationFilterOptions, direction="write" + ), + "serialization_options": convert_and_respect_annotation_metadata( + object_=serialization_options, annotation=SerializationOptions, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportSnapshot, + parse_obj_as( + type_=ExportSnapshot, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + @contextlib.asynccontextmanager + async def download( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> 
typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]: + """ + + Download an export snapshot as a file in a specified format. To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + Selected export format. JSON is available by default. For other formats, you need to convert the export first. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response. 
+ + Returns + ------- + typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]] + Exported data in binary format + """ + async with self._client_wrapper.httpx_client.stream( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/download", + method="GET", + params={ + "exportType": export_type, + }, + request_options=request_options, + ) as _response: + + async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: + try: + if 200 <= _response.status_code < 300: + _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None + return AsyncHttpResponse( + response=_response, + data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)), + ) + await _response.aread() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield await _stream() + + async def get( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Export]: + """ + + Retrieve information about a specific export file (snapshot). + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + You will also need the project ID. This can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Export] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Export, + parse_obj_as( + type_=Export, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, project_id: int, export_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete an export file by specified export ID. + + You will need the export ID. You can find this in the response when you [create the snapshot via the API](create) or using [List all export snapshots](list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def convert( + self, + project_id: int, + export_pk: str, + *, + export_type: typing.Optional[ExportFormat] = OMIT, + download_resources: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ExportsConvertResponse]: + """ + + You can use this to convert an export snapshot into the selected format. + + To see what formats are supported, you can use [Get export formats](list-formats) or see [Export formats supported by Label Studio](https://labelstud.io/guide/export#Export-formats-supported-by-Label-Studio). + + You will need to provide the project ID and export ID (`export_pk`). The export ID is returned when you create the export or you can use [List all export snapshots](list). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../list). + + Parameters + ---------- + project_id : int + A unique integer value identifying this project. + + export_pk : str + Primary key identifying the export file. + + export_type : typing.Optional[ExportFormat] + + download_resources : typing.Optional[bool] + If true, download all resource files such as images, audio, and others relevant to the tasks. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ExportsConvertResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_id)}/exports/{jsonable_encoder(export_pk)}/convert", + method="POST", + json={ + "export_type": export_type, + "download_resources": download_resources, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ExportsConvertResponse, + parse_obj_as( + type_=ExportsConvertResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/exports/types/__init__.py b/src/label_studio_sdk/projects/exports/types/__init__.py index 8ac8e7b26..45200ad5d 100644 --- a/src/label_studio_sdk/projects/exports/types/__init__.py +++ b/src/label_studio_sdk/projects/exports/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .exports_convert_response import ExportsConvertResponse from .exports_list_formats_response_item import ExportsListFormatsResponseItem diff --git a/src/label_studio_sdk/projects/exports/types/exports_convert_response.py b/src/label_studio_sdk/projects/exports/types/exports_convert_response.py index 1543ef9b9..3003e1e7a 100644 --- a/src/label_studio_sdk/projects/exports/types/exports_convert_response.py +++ b/src/label_studio_sdk/projects/exports/types/exports_convert_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing -from ....types.export_format import ExportFormat + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....types.export_format import ExportFormat class ExportsConvertResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py b/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py index 60bc39816..bbb67f01b 100644 --- a/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py +++ b/src/label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing -from ....types.export_format import ExportFormat + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ....types.export_format import ExportFormat class ExportsListFormatsResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/pauses/__init__.py b/src/label_studio_sdk/projects/pauses/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/projects/pauses/__init__.py +++ b/src/label_studio_sdk/projects/pauses/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/projects/pauses/client.py b/src/label_studio_sdk/projects/pauses/client.py index b0a303f43..2c1961330 100644 --- a/src/label_studio_sdk/projects/pauses/client.py +++ b/src/label_studio_sdk/projects/pauses/client.py @@ -1,14 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.pause import Pause -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawPausesClient, RawPausesClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -16,7 +13,18 @@ class PausesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawPausesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPausesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawPausesClient + """ + return self._raw_client def list( self, @@ -60,27 +68,10 @@ def list( user_pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="GET", - params={ - "include_deleted": include_deleted, - }, - request_options=request_options, + _response = self._raw_client.list( + project_pk, user_pk, include_deleted=include_deleted, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Pause], - parse_obj_as( - type_=typing.List[Pause], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create( self, @@ -127,40 +118,13 @@ def create( reason="reason", ) """ - 
_response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="POST", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.create( + project_pk, user_pk, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( - self, - project_pk: int, - user_pk: int, - id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> Pause: """ Return detailed information about a specific pause. 
@@ -197,32 +161,11 @@ def get( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(project_pk, user_pk, id, request_options=request_options) + return _response.data def delete( - self, - project_pk: int, - user_pk: int, - id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> None: """ Remove a pause from the database. 
@@ -258,18 +201,8 @@ def delete( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(project_pk, user_pk, id, request_options=request_options) + return _response.data def update( self, @@ -321,37 +254,26 @@ def update( reason="reason", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="PATCH", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.update( + project_pk, user_pk, id, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPausesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawPausesClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawPausesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawPausesClient + """ + return self._raw_client async def list( self, @@ -403,27 +325,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="GET", - params={ - "include_deleted": include_deleted, - }, - request_options=request_options, + _response = await self._raw_client.list( + project_pk, user_pk, include_deleted=include_deleted, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Pause], - parse_obj_as( - type_=typing.List[Pause], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ -478,40 +383,13 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", - method="POST", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.create( + project_pk, user_pk, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( - self, - 
project_pk: int, - user_pk: int, - id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> Pause: """ Return detailed information about a specific pause. @@ -556,32 +434,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(project_pk, user_pk, id, request_options=request_options) + return _response.data async def delete( - self, - project_pk: int, - user_pk: int, - id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> None: """ Remove a pause from the database. 
@@ -625,18 +482,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(project_pk, user_pk, id, request_options=request_options) + return _response.data async def update( self, @@ -696,29 +543,7 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", - method="PATCH", - json={ - "reason": reason, - "verbose_reason": verbose_reason, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.update( + project_pk, user_pk, id, reason=reason, verbose_reason=verbose_reason, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Pause, - parse_obj_as( - type_=Pause, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/projects/pauses/raw_client.py b/src/label_studio_sdk/projects/pauses/raw_client.py new file mode 100644 index 000000000..d109e48d4 --- /dev/null +++ b/src/label_studio_sdk/projects/pauses/raw_client.py @@ -0,0 +1,543 @@ +# This file was auto-generated by 
Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.pause import Pause + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawPausesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + project_pk: int, + user_pk: int, + *, + include_deleted: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[typing.List[Pause]]: + """ + Return a list of pause objects for the specified project and user. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + include_deleted : typing.Optional[bool] + Include deleted pauses + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[Pause]] + Successfully retrieved a list of pauses + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="GET", + params={ + "include_deleted": include_deleted, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Pause], + parse_obj_as( + type_=typing.List[Pause], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + project_pk: int, + user_pk: int, + *, + reason: str, + verbose_reason: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Pause]: + """ + Create a new pause object for the specified project and user. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + reason : str + + verbose_reason : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Pause] + Successfully created a pause + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="POST", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[Pause]: + """ + Return detailed information about a specific pause. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + id : int + Pause ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Pause] + Successfully retrieved the pause + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete( + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + Remove a pause from the database. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + id : int + Pause ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + project_pk: int, + user_pk: int, + id: int, + *, + reason: str, + verbose_reason: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Pause]: + """ + Partially update one or more fields of an existing pause. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + id : int + Pause ID + + reason : str + + verbose_reason : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Pause] + Successfully updated the pause (partial) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="PATCH", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawPausesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + project_pk: int, + user_pk: int, + *, + include_deleted: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[typing.List[Pause]]: + """ + Return a list of pause objects for the specified project and user. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + include_deleted : typing.Optional[bool] + Include deleted pauses + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Pause]] + Successfully retrieved a list of pauses + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="GET", + params={ + "include_deleted": include_deleted, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Pause], + parse_obj_as( + type_=typing.List[Pause], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + project_pk: int, + user_pk: int, + *, + reason: str, + verbose_reason: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Pause]: + """ + Create a new pause object for the specified project and user. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + reason : str + + verbose_reason : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Pause] + Successfully created a pause + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses", + method="POST", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Pause]: + """ + Return detailed information about a specific pause. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + id : int + Pause ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Pause] + Successfully retrieved the pause + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, project_pk: int, user_pk: int, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Remove a pause from the database. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + id : int + Pause ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + project_pk: int, + user_pk: int, + id: int, + *, + reason: str, + verbose_reason: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Pause]: + """ + Partially update one or more fields of an existing pause. + + Parameters + ---------- + project_pk : int + Project ID + + user_pk : int + User ID + + id : int + Pause ID + + reason : str + + verbose_reason : typing.Optional[str] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Pause] + Successfully updated the pause (partial) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(project_pk)}/members/{jsonable_encoder(user_pk)}/pauses/{jsonable_encoder(id)}", + method="PATCH", + json={ + "reason": reason, + "verbose_reason": verbose_reason, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Pause, + parse_obj_as( + type_=Pause, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/raw_client.py b/src/label_studio_sdk/projects/raw_client.py new file mode 100644 index 000000000..75ddc0920 --- /dev/null +++ b/src/label_studio_sdk/projects/raw_client.py @@ -0,0 +1,1237 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pagination import AsyncPager, BaseHttpResponse, SyncPager +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.bad_request_error import BadRequestError +from ..types.project import Project +from ..types.project_label_config import ProjectLabelConfig +from .types.projects_create_response import ProjectsCreateResponse +from .types.projects_import_tasks_response import ProjectsImportTasksResponse +from .types.projects_list_response import ProjectsListResponse +from .types.projects_update_response import ProjectsUpdateResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawProjectsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + ordering: typing.Optional[str] = None, + ids: typing.Optional[str] = None, + title: typing.Optional[str] = None, + page: typing.Optional[int] = None, + page_size: typing.Optional[int] = None, + workspaces: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> SyncPager[Project]: + """ + + Return a list of the projects within your organization. + + To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. + + To retrieve a list of your Label Studio projects, update the following command to match your own environment. 
+ Replace the domain name, port, and authorization token, then run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' + ``` + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + ids : typing.Optional[str] + ids + + title : typing.Optional[str] + title + + page : typing.Optional[int] + A page number within the paginated result set. + + page_size : typing.Optional[int] + Number of results to return per page. + + workspaces : typing.Optional[int] + workspaces + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SyncPager[Project] + + """ + page = page if page is not None else 1 + + _response = self._client_wrapper.httpx_client.request( + "api/projects/", + method="GET", + params={ + "ordering": ordering, + "ids": ids, + "title": title, + "page": page, + "page_size": page_size, + "workspaces": workspaces, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + ProjectsListResponse, + parse_obj_as( + type_=ProjectsListResponse, # type: ignore + object_=_response.json(), + ), + ) + _items = _parsed_response.results + _has_next = True + _get_next = lambda: self.list( + ordering=ordering, + ids=ids, + title=title, + page=page + 1, + page_size=page_size, + workspaces=workspaces, + request_options=request_options, + ) + return SyncPager( + has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: 
typing.Optional[str] = OMIT, + label_config: typing.Optional[str] = OMIT, + expert_instruction: typing.Optional[str] = OMIT, + show_instruction: typing.Optional[bool] = OMIT, + show_skip_button: typing.Optional[bool] = OMIT, + enable_empty_annotation: typing.Optional[bool] = OMIT, + show_annotation_history: typing.Optional[bool] = OMIT, + reveal_preannotations_interactively: typing.Optional[bool] = OMIT, + show_collab_predictions: typing.Optional[bool] = OMIT, + maximum_annotations: typing.Optional[int] = OMIT, + color: typing.Optional[str] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + workspace: typing.Optional[int] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ProjectsCreateResponse]: + """ + + Create a project and set up the labeling interface. For more information about setting up projects, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) + + ```bash + curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' + ``` + + Parameters + ---------- + title : typing.Optional[str] + Project title + + description : typing.Optional[str] + Project description + + label_config : typing.Optional[str] + Label config in XML format + + expert_instruction : typing.Optional[str] + Labeling instructions to show to the user + + show_instruction : typing.Optional[bool] + Show labeling instructions + + show_skip_button : typing.Optional[bool] + Show skip button + + enable_empty_annotation : typing.Optional[bool] + Allow empty annotations + + show_annotation_history : typing.Optional[bool] + Show annotation history + + reveal_preannotations_interactively : 
typing.Optional[bool] + Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + + show_collab_predictions : typing.Optional[bool] + Show predictions to annotators + + maximum_annotations : typing.Optional[int] + Maximum annotations per task + + color : typing.Optional[str] + Project color in HEX format + + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} + + workspace : typing.Optional[int] + Workspace ID + + model_version : typing.Optional[str] + Model version + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ProjectsCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/projects/", + method="POST", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsCreateResponse, + parse_obj_as( + type_=ProjectsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Project]: + """ + Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Project] + Project information + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + label_config: typing.Optional[str] = OMIT, + expert_instruction: typing.Optional[str] = OMIT, + show_instruction: typing.Optional[bool] = OMIT, + show_skip_button: typing.Optional[bool] = OMIT, + enable_empty_annotation: typing.Optional[bool] = OMIT, + show_annotation_history: typing.Optional[bool] = OMIT, + reveal_preannotations_interactively: typing.Optional[bool] = OMIT, + show_collab_predictions: typing.Optional[bool] = OMIT, + maximum_annotations: typing.Optional[int] = OMIT, + color: typing.Optional[str] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + workspace: typing.Optional[int] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ProjectsUpdateResponse]: + """ + + Update the project settings for a specific project. For more information, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). 
+ + + If you are modifying the labeling config for project that has in-progress work, note the following: + * You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. + * If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. + + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + title : typing.Optional[str] + Project title + + description : typing.Optional[str] + Project description + + label_config : typing.Optional[str] + Label config in XML format + + expert_instruction : typing.Optional[str] + Labeling instructions to show to the user + + show_instruction : typing.Optional[bool] + Show labeling instructions + + show_skip_button : typing.Optional[bool] + Show skip button + + enable_empty_annotation : typing.Optional[bool] + Allow empty annotations + + show_annotation_history : typing.Optional[bool] + Show annotation history + + reveal_preannotations_interactively : typing.Optional[bool] + Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + + show_collab_predictions : typing.Optional[bool] + Show predictions to annotators + + maximum_annotations : typing.Optional[int] + Maximum annotations per task + + color : typing.Optional[str] + Project color in HEX format + + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. 
For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} + + workspace : typing.Optional[int] + Workspace ID + + model_version : typing.Optional[str] + Model version + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ProjectsUpdateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsUpdateResponse, + parse_obj_as( + type_=ProjectsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def import_tasks( + self, + id: 
int, + *, + request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]], + commit_to_project: typing.Optional[bool] = None, + return_task_ids: typing.Optional[bool] = None, + preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[ProjectsImportTasksResponse]: + """ + + Use this API endpoint to import labeling tasks in bulk. Note that each POST request is limited at 250K tasks and 200 MB. + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + + Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + + For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. + + + There are three possible ways to import tasks with this endpoint: + + #### 1. **POST with data** + Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. + + Update this example to specify your authorization token and Label Studio instance host, then run the following from + the command line: + + ```bash + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' + ``` + + #### 2. **POST with files** + Send tasks as files. You can attach multiple files with different names. 
+ + - **JSON**: text files in JavaScript object notation format + - **CSV**: text files with tables in Comma Separated Values format + - **TSV**: text files with tables in Tab Separated Value format + - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only + + Update this example to specify your authorization token, Label Studio instance host, and file name and path, + then run the following from the command line: + + ```bash + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' + ``` + + #### 3. **POST with URL** + You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. + + ```bash + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' + ``` + +
+ + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] + + commit_to_project : typing.Optional[bool] + Set to "true" to immediately commit tasks to the project. + + return_task_ids : typing.Optional[bool] + Set to "true" to return task IDs in the response. + + preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannotated_from_fields=["prediction"]`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ProjectsImportTasksResponse] + Tasks successfully imported + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/import", + method="POST", + params={ + "commit_to_project": commit_to_project, + "return_task_ids": return_task_ids, + "preannotated_from_fields": preannotated_from_fields, + }, + json=request, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsImportTasksResponse, + parse_obj_as( + type_=ProjectsImportTasksResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def validate_config( + self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ProjectLabelConfig]: + """ + + Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + label_config : str + Label config in XML format. See more about it in documentation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ProjectLabelConfig] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/validate/", + method="POST", + json={ + "label_config": label_config, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectLabelConfig, + parse_obj_as( + type_=ProjectLabelConfig, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawProjectsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + ordering: typing.Optional[str] = None, + ids: typing.Optional[str] = None, + title: typing.Optional[str] = None, + page: typing.Optional[int] = None, + page_size: typing.Optional[int] = None, + workspaces: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncPager[Project]: + """ + + Return a list of the projects within your organization. + + To perform most tasks with the Label Studio API, you must specify the project ID, sometimes referred to as the `pk`. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using this API call. + + To retrieve a list of your Label Studio projects, update the following command to match your own environment. 
+ Replace the domain name, port, and authorization token, then run the following from the command line: + ```bash + curl -X GET https://localhost:8080/api/projects/ -H 'Authorization: Token abc123' + ``` + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + ids : typing.Optional[str] + ids + + title : typing.Optional[str] + title + + page : typing.Optional[int] + A page number within the paginated result set. + + page_size : typing.Optional[int] + Number of results to return per page. + + workspaces : typing.Optional[int] + workspaces + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncPager[Project] + + """ + page = page if page is not None else 1 + + _response = await self._client_wrapper.httpx_client.request( + "api/projects/", + method="GET", + params={ + "ordering": ordering, + "ids": ids, + "title": title, + "page": page, + "page_size": page_size, + "workspaces": workspaces, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + ProjectsListResponse, + parse_obj_as( + type_=ProjectsListResponse, # type: ignore + object_=_response.json(), + ), + ) + _items = _parsed_response.results + _has_next = True + + async def _get_next(): + return await self.list( + ordering=ordering, + ids=ids, + title=title, + page=page + 1, + page_size=page_size, + workspaces=workspaces, + request_options=request_options, + ) + + return AsyncPager( + has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = 
OMIT, + description: typing.Optional[str] = OMIT, + label_config: typing.Optional[str] = OMIT, + expert_instruction: typing.Optional[str] = OMIT, + show_instruction: typing.Optional[bool] = OMIT, + show_skip_button: typing.Optional[bool] = OMIT, + enable_empty_annotation: typing.Optional[bool] = OMIT, + show_annotation_history: typing.Optional[bool] = OMIT, + reveal_preannotations_interactively: typing.Optional[bool] = OMIT, + show_collab_predictions: typing.Optional[bool] = OMIT, + maximum_annotations: typing.Optional[int] = OMIT, + color: typing.Optional[str] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + workspace: typing.Optional[int] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ProjectsCreateResponse]: + """ + + Create a project and set up the labeling interface. For more information about setting up projects, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) + + ```bash + curl -H Content-Type:application/json -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects' --data '{"label_config": "[...]"}' + ``` + + Parameters + ---------- + title : typing.Optional[str] + Project title + + description : typing.Optional[str] + Project description + + label_config : typing.Optional[str] + Label config in XML format + + expert_instruction : typing.Optional[str] + Labeling instructions to show to the user + + show_instruction : typing.Optional[bool] + Show labeling instructions + + show_skip_button : typing.Optional[bool] + Show skip button + + enable_empty_annotation : typing.Optional[bool] + Allow empty annotations + + show_annotation_history : typing.Optional[bool] + Show annotation history + + 
reveal_preannotations_interactively : typing.Optional[bool] + Reveal preannotations interactively. If set to True, predictions will be shown to the user only after selecting the area of interest + + show_collab_predictions : typing.Optional[bool] + Show predictions to annotators + + maximum_annotations : typing.Optional[int] + Maximum annotations per task + + color : typing.Optional[str] + Project color in HEX format + + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} + + workspace : typing.Optional[int] + Workspace ID + + model_version : typing.Optional[str] + Model version + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectsCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/projects/", + method="POST", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsCreateResponse, + parse_obj_as( + type_=ProjectsCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Project]: + """ + Retrieve information about a specific project by project ID. The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Project] + Project information + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Project, + parse_obj_as( + type_=Project, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a project by specified project ID. Deleting a project permanently removes all tasks, annotations, and project data from Label Studio. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + label_config: typing.Optional[str] = OMIT, + expert_instruction: typing.Optional[str] = OMIT, + show_instruction: typing.Optional[bool] = OMIT, + show_skip_button: typing.Optional[bool] = OMIT, + enable_empty_annotation: typing.Optional[bool] = OMIT, + show_annotation_history: typing.Optional[bool] = OMIT, + reveal_preannotations_interactively: typing.Optional[bool] = OMIT, + show_collab_predictions: typing.Optional[bool] = OMIT, + maximum_annotations: typing.Optional[int] = OMIT, + color: typing.Optional[str] = OMIT, + control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + workspace: typing.Optional[int] = OMIT, + model_version: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ProjectsUpdateResponse]: + """ + + Update the project settings for a specific project. 
For more information, see the following: + * [Create and configure projects](https://labelstud.io/guide/setup_project) + * [Configure labeling interface](https://labelstud.io/guide/setup) + * [Project settings](https://labelstud.io/guide/project_settings) + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + + If you are modifying the labeling config for project that has in-progress work, note the following: + * You cannot remove labels or change the type of labeling being performed unless you delete any existing annotations that are using those labels. + * If you make changes to the labeling configuration, any tabs that you might have created in the Data Manager are removed. + + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + title : typing.Optional[str] + Project title + + description : typing.Optional[str] + Project description + + label_config : typing.Optional[str] + Label config in XML format + + expert_instruction : typing.Optional[str] + Labeling instructions to show to the user + + show_instruction : typing.Optional[bool] + Show labeling instructions + + show_skip_button : typing.Optional[bool] + Show skip button + + enable_empty_annotation : typing.Optional[bool] + Allow empty annotations + + show_annotation_history : typing.Optional[bool] + Show annotation history + + reveal_preannotations_interactively : typing.Optional[bool] + Reveal preannotations interactively. 
If set to True, predictions will be shown to the user only after selecting the area of interest + + show_collab_predictions : typing.Optional[bool] + Show predictions to annotators + + maximum_annotations : typing.Optional[int] + Maximum annotations per task + + color : typing.Optional[str] + Project color in HEX format + + control_weights : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} + + workspace : typing.Optional[int] + Workspace ID + + model_version : typing.Optional[str] + Model version + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectsUpdateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "title": title, + "description": description, + "label_config": label_config, + "expert_instruction": expert_instruction, + "show_instruction": show_instruction, + "show_skip_button": show_skip_button, + "enable_empty_annotation": enable_empty_annotation, + "show_annotation_history": show_annotation_history, + "reveal_preannotations_interactively": reveal_preannotations_interactively, + "show_collab_predictions": show_collab_predictions, + "maximum_annotations": maximum_annotations, + "color": color, + "control_weights": control_weights, + "workspace": workspace, + "model_version": model_version, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsUpdateResponse, + parse_obj_as( + type_=ProjectsUpdateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def import_tasks( + self, + id: int, + *, + request: typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]], + commit_to_project: typing.Optional[bool] = None, + return_task_ids: typing.Optional[bool] = None, + preannotated_from_fields: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[ProjectsImportTasksResponse]: + """ + + Use this API endpoint to import labeling tasks in bulk. 
Note that each POST request is limited at 250K tasks and 200 MB. + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + + Imported data is verified against a project *label_config* and must include all variables that were used in the *label_config*. + + For example, if the label configuration has a *$text* variable, then each item in a data object must include a `text` field. + + + There are three possible ways to import tasks with this endpoint: + + #### 1. **POST with data** + Send JSON tasks as POST data. Only JSON is supported for POSTing files directly. + + Update this example to specify your authorization token and Label Studio instance host, then run the following from + the command line: + + ```bash + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"text": "Some text 1"}, {"text": "Some text 2"}]' + ``` + + #### 2. **POST with files** + Send tasks as files. You can attach multiple files with different names. + + - **JSON**: text files in JavaScript object notation format + - **CSV**: text files with tables in Comma Separated Values format + - **TSV**: text files with tables in Tab Separated Value format + - **TXT**: simple text files are similar to CSV with one column and no header, supported for projects with one source only + + Update this example to specify your authorization token, Label Studio instance host, and file name and path, + then run the following from the command line: + + ```bash + curl -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' -F 'file=@path/to/my_file.csv' + ``` + + #### 3. **POST with URL** + You can also provide a URL to a file with labeling tasks. Supported file formats are the same as in option 2. 
+ + ```bash + curl -H 'Content-Type: application/json' -H 'Authorization: Token abc123' -X POST 'https://localhost:8080/api/projects/1/import' --data '[{"url": "http://example.com/test1.csv"}, {"url": "http://example.com/test2.csv"}]' + ``` + +
+ + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request : typing.Sequence[typing.Dict[str, typing.Optional[typing.Any]]] + + commit_to_project : typing.Optional[bool] + Set to "true" to immediately commit tasks to the project. + + return_task_ids : typing.Optional[bool] + Set to "true" to return task IDs in the response. + + preannotated_from_fields : typing.Optional[typing.Union[str, typing.Sequence[str]]] + List of fields to preannotate from the task data. For example, if you provide a list of `{"text": "text", "prediction": "label"}` items in the request, the system will create a task with the `text` field and a prediction with the `label` field when `preannoted_from_fields=["prediction"]`. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ProjectsImportTasksResponse] + Tasks successfully imported + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/import", + method="POST", + params={ + "commit_to_project": commit_to_project, + "return_task_ids": return_task_ids, + "preannotated_from_fields": preannotated_from_fields, + }, + json=request, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectsImportTasksResponse, + parse_obj_as( + type_=ProjectsImportTasksResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def validate_config( + self, id: int, *, label_config: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ProjectLabelConfig]: + """ + + Determine whether the label configuration for a specific project is valid. For more information about setting up labeling configs, see [Configure labeling interface](https://labelstud.io/guide/setup) and our [Tags reference](https://labelstud.io/tags/). + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + label_config : str + Label config in XML format. See more about it in documentation + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectLabelConfig] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/validate/", + method="POST", + json={ + "label_config": label_config, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectLabelConfig, + parse_obj_as( + type_=ProjectLabelConfig, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/projects/types/__init__.py b/src/label_studio_sdk/projects/types/__init__.py index 8f2e80d13..71e3306e4 100644 --- a/src/label_studio_sdk/projects/types/__init__.py +++ b/src/label_studio_sdk/projects/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .projects_create_response import ProjectsCreateResponse from .projects_import_tasks_response import ProjectsImportTasksResponse from .projects_list_response import ProjectsListResponse diff --git a/src/label_studio_sdk/projects/types/projects_create_response.py b/src/label_studio_sdk/projects/types/projects_create_response.py index b56d864a8..eaadba2f3 100644 --- a/src/label_studio_sdk/projects/types/projects_create_response.py +++ b/src/label_studio_sdk/projects/types/projects_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ProjectsCreateResponse(UniversalBaseModel): @@ -78,7 +78,7 @@ class ProjectsCreateResponse(UniversalBaseModel): control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} """ if IS_PYDANTIC_V2: diff --git a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py index 66adc2d99..24f31d790 100644 --- a/src/label_studio_sdk/projects/types/projects_import_tasks_response.py +++ b/src/label_studio_sdk/projects/types/projects_import_tasks_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ProjectsImportTasksResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_list_response.py b/src/label_studio_sdk/projects/types/projects_list_response.py index 6d16d6b32..98a5363a7 100644 --- a/src/label_studio_sdk/projects/types/projects_list_response.py +++ b/src/label_studio_sdk/projects/types/projects_list_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...types.project import Project -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...types.project import Project class ProjectsListResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/projects/types/projects_update_response.py b/src/label_studio_sdk/projects/types/projects_update_response.py index 5034b9c8b..cce7735f4 100644 --- a/src/label_studio_sdk/projects/types/projects_update_response.py +++ b/src/label_studio_sdk/projects/types/projects_update_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class ProjectsUpdateResponse(UniversalBaseModel): @@ -83,7 +83,7 @@ class ProjectsUpdateResponse(UniversalBaseModel): control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ - Dict of weights for each control tag in metric calculation. Each control tag (e.g. 
label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33} + Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} """ if IS_PYDANTIC_V2: diff --git a/src/label_studio_sdk/prompts/__init__.py b/src/label_studio_sdk/prompts/__init__.py index a9ec1fd8f..7104d2f12 100644 --- a/src/label_studio_sdk/prompts/__init__.py +++ b/src/label_studio_sdk/prompts/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, PromptsBatchFailedPredictionsResponse, diff --git a/src/label_studio_sdk/prompts/client.py b/src/label_studio_sdk/prompts/client.py index fe30d2947..f57f7765d 100644 --- a/src/label_studio_sdk/prompts/client.py +++ b/src/label_studio_sdk/prompts/client.py @@ -1,35 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ..core.client_wrapper import SyncClientWrapper -from .versions.client import VersionsClient -from .runs.client import RunsClient -from .indicators.client import IndicatorsClient + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.prompt import Prompt -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem from ..types.prompt_created_by import PromptCreatedBy -import datetime as dt from ..types.prompt_organization import PromptOrganization -from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.jsonable_encoder import jsonable_encoder -from .types.prompts_batch_predictions_request_results_item import ( - PromptsBatchPredictionsRequestResultsItem, -) -from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse +from .indicators.client import AsyncIndicatorsClient, IndicatorsClient +from .raw_client import AsyncRawPromptsClient, RawPromptsClient +from .runs.client import AsyncRunsClient, RunsClient from .types.prompts_batch_failed_predictions_request_failed_predictions_item import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, ) -from .types.prompts_batch_failed_predictions_response import ( - PromptsBatchFailedPredictionsResponse, -) -from ..core.client_wrapper import AsyncClientWrapper -from .versions.client import AsyncVersionsClient -from .runs.client import AsyncRunsClient -from .indicators.client import AsyncIndicatorsClient +from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse +from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem 
+from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse +from .versions.client import AsyncVersionsClient, VersionsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -37,10 +26,23 @@ class PromptsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.versions = VersionsClient(client_wrapper=self._client_wrapper) - self.runs = RunsClient(client_wrapper=self._client_wrapper) - self.indicators = IndicatorsClient(client_wrapper=self._client_wrapper) + self._raw_client = RawPromptsClient(client_wrapper=client_wrapper) + self.versions = VersionsClient(client_wrapper=client_wrapper) + + self.runs = RunsClient(client_wrapper=client_wrapper) + + self.indicators = IndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawPromptsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawPromptsClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: """ @@ -65,24 +67,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.prompts.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/prompts/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prompt], - parse_obj_as( - type_=typing.List[Prompt], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -155,47 +141,20 @@ def create( output_classes=["output_classes"], ) """ - _response = self._client_wrapper.httpx_client.request( - "api/prompts/", - method="POST", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptOrganization, - direction="write", - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = self._raw_client.create( + title=title, + input_fields=input_fields, + output_classes=output_classes, + description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + 
organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt: """ @@ -225,24 +184,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -271,18 +214,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, 
body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -360,51 +293,26 @@ def update( output_classes=["output_classes"], ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptOrganization, - direction="write", - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = self._raw_client.update( + id, + title=title, + input_fields=input_fields, + output_classes=output_classes, + description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def batch_predictions( self, *, + num_predictions: typing.Optional[int] = None, modelrun_id: typing.Optional[int] = OMIT, results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, request_options: 
typing.Optional[RequestOptions] = None, @@ -414,6 +322,9 @@ def batch_predictions( Parameters ---------- + num_predictions : typing.Optional[int] + Number of predictions being sent + modelrun_id : typing.Optional[int] Model Run ID to associate the prediction with @@ -436,40 +347,15 @@ def batch_predictions( ) client.prompts.batch_predictions() """ - _response = self._client_wrapper.httpx_client.request( - "api/model-run/batch-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "results": convert_and_respect_annotation_metadata( - object_=results, - annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = self._raw_client.batch_predictions( + num_predictions=num_predictions, modelrun_id=modelrun_id, results=results, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchPredictionsResponse, - parse_obj_as( - type_=PromptsBatchPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def batch_failed_predictions( self, *, + num_failed_predictions: typing.Optional[int] = None, modelrun_id: typing.Optional[int] = OMIT, failed_predictions: typing.Optional[ typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] @@ -481,6 +367,9 @@ def batch_failed_predictions( Parameters ---------- + num_failed_predictions : typing.Optional[int] + Number of failed predictions being sent + modelrun_id : typing.Optional[int] Model Run ID where the failed predictions came from @@ -503,44 +392,34 @@ def batch_failed_predictions( ) client.prompts.batch_failed_predictions() """ - _response = 
self._client_wrapper.httpx_client.request( - "api/model-run/batch-failed-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "failed_predictions": convert_and_respect_annotation_metadata( - object_=failed_predictions, - annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.batch_failed_predictions( + num_failed_predictions=num_failed_predictions, + modelrun_id=modelrun_id, + failed_predictions=failed_predictions, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchFailedPredictionsResponse, - parse_obj_as( - type_=PromptsBatchFailedPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncPromptsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.versions = AsyncVersionsClient(client_wrapper=self._client_wrapper) - self.runs = AsyncRunsClient(client_wrapper=self._client_wrapper) - self.indicators = AsyncIndicatorsClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawPromptsClient(client_wrapper=client_wrapper) + self.versions = AsyncVersionsClient(client_wrapper=client_wrapper) + + self.runs = AsyncRunsClient(client_wrapper=client_wrapper) + + self.indicators = AsyncIndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawPromptsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawPromptsClient + """ + return self._raw_client async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Prompt]: """ @@ -573,24 +452,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/prompts/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Prompt], - parse_obj_as( - type_=typing.List[Prompt], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -671,47 +534,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/prompts/", - method="POST", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptOrganization, - direction="write", - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = await self._raw_client.create( + title=title, + input_fields=input_fields, + output_classes=output_classes, + description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + 
organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Prompt: """ @@ -749,24 +585,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -803,18 +623,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, 
request_options=request_options) + return _response.data async def update( self, @@ -900,51 +710,26 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, annotation=PromptCreatedBy, direction="write" - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptOrganization, - direction="write", - ), - "input_fields": input_fields, - "output_classes": output_classes, - "associated_projects": convert_and_respect_annotation_metadata( - object_=associated_projects, - annotation=typing.Sequence[PromptAssociatedProjectsItem], - direction="write", - ), - "skill_name": skill_name, - }, + _response = await self._raw_client.update( + id, + title=title, + input_fields=input_fields, + output_classes=output_classes, + description=description, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, + associated_projects=associated_projects, + skill_name=skill_name, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Prompt, - parse_obj_as( - type_=Prompt, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def batch_predictions( self, *, + num_predictions: typing.Optional[int] = None, modelrun_id: typing.Optional[int] = OMIT, results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, request_options: typing.Optional[RequestOptions] = None, @@ 
-954,6 +739,9 @@ async def batch_predictions( Parameters ---------- + num_predictions : typing.Optional[int] + Number of predictions being sent + modelrun_id : typing.Optional[int] Model Run ID to associate the prediction with @@ -984,40 +772,15 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/model-run/batch-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "results": convert_and_respect_annotation_metadata( - object_=results, - annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, + _response = await self._raw_client.batch_predictions( + num_predictions=num_predictions, modelrun_id=modelrun_id, results=results, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchPredictionsResponse, - parse_obj_as( - type_=PromptsBatchPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def batch_failed_predictions( self, *, + num_failed_predictions: typing.Optional[int] = None, modelrun_id: typing.Optional[int] = OMIT, failed_predictions: typing.Optional[ typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] @@ -1029,6 +792,9 @@ async def batch_failed_predictions( Parameters ---------- + num_failed_predictions : typing.Optional[int] + Number of failed predictions being sent + modelrun_id : typing.Optional[int] Model Run ID where the failed predictions came from @@ -1059,33 +825,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - 
"api/model-run/batch-failed-predictions", - method="POST", - json={ - "modelrun_id": modelrun_id, - "failed_predictions": convert_and_respect_annotation_metadata( - object_=failed_predictions, - annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], - direction="write", - ), - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.batch_failed_predictions( + num_failed_predictions=num_failed_predictions, + modelrun_id=modelrun_id, + failed_predictions=failed_predictions, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptsBatchFailedPredictionsResponse, - parse_obj_as( - type_=PromptsBatchFailedPredictionsResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/prompts/indicators/__init__.py b/src/label_studio_sdk/prompts/indicators/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/prompts/indicators/__init__.py +++ b/src/label_studio_sdk/prompts/indicators/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/prompts/indicators/client.py b/src/label_studio_sdk/prompts/indicators/client.py index 58b7ac155..b3ec7bc01 100644 --- a/src/label_studio_sdk/prompts/indicators/client.py +++ b/src/label_studio_sdk/prompts/indicators/client.py @@ -1,20 +1,28 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.client_wrapper import SyncClientWrapper import typing + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions -from ...types.key_indicators import KeyIndicators -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError from ...types.key_indicator_value import KeyIndicatorValue -from ...core.client_wrapper import AsyncClientWrapper +from ...types.key_indicators import KeyIndicators +from .raw_client import AsyncRawIndicatorsClient, RawIndicatorsClient class IndicatorsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawIndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawIndicatorsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawIndicatorsClient + """ + return self._raw_client def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> KeyIndicators: """ @@ -44,31 +52,11 @@ def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = No pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicators, - parse_obj_as( - type_=KeyIndicators, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(pk, request_options=request_options) + return _response.data def get( - self, - indicator_key: str, - pk: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None ) -> KeyIndicatorValue: """ Get a specific key indicator for the Prompt dashboard. 
@@ -101,29 +89,24 @@ def get( pk=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicatorValue, - parse_obj_as( - type_=KeyIndicatorValue, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(indicator_key, pk, request_options=request_options) + return _response.data class AsyncIndicatorsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawIndicatorsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawIndicatorsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawIndicatorsClient + """ + return self._raw_client async def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> KeyIndicators: """ @@ -161,31 +144,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicators, - parse_obj_as( - type_=KeyIndicators, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(pk, request_options=request_options) + return _response.data async def get( - self, - indicator_key: str, - pk: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None ) -> KeyIndicatorValue: """ Get a specific key indicator for the Prompt dashboard. 
@@ -226,21 +189,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - KeyIndicatorValue, - parse_obj_as( - type_=KeyIndicatorValue, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(indicator_key, pk, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/prompts/indicators/raw_client.py b/src/label_studio_sdk/prompts/indicators/raw_client.py new file mode 100644 index 000000000..43bdb6b8e --- /dev/null +++ b/src/label_studio_sdk/prompts/indicators/raw_client.py @@ -0,0 +1,183 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...types.key_indicator_value import KeyIndicatorValue +from ...types.key_indicators import KeyIndicators + + +class RawIndicatorsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, pk: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[KeyIndicators]: + """ + Get key indicators for the Prompt dashboard. 
+ + Parameters + ---------- + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[KeyIndicators] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[KeyIndicatorValue]: + """ + Get a specific key indicator for the Prompt dashboard. + + Parameters + ---------- + indicator_key : str + Key of the indicator + + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[KeyIndicatorValue] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawIndicatorsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[KeyIndicators]: + """ + Get key indicators for the Prompt dashboard. + + Parameters + ---------- + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[KeyIndicators] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicators, + parse_obj_as( + type_=KeyIndicators, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, indicator_key: str, pk: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[KeyIndicatorValue]: + """ + Get a specific key indicator for the Prompt dashboard. + + Parameters + ---------- + indicator_key : str + Key of the indicator + + pk : int + Inference run ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[KeyIndicatorValue] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/inference-runs/{jsonable_encoder(pk)}/indicators/{jsonable_encoder(indicator_key)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + KeyIndicatorValue, + parse_obj_as( + type_=KeyIndicatorValue, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/prompts/raw_client.py b/src/label_studio_sdk/prompts/raw_client.py new file mode 100644 index 000000000..0b8811e46 --- /dev/null +++ b/src/label_studio_sdk/prompts/raw_client.py @@ -0,0 +1,918 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.prompt import Prompt +from ..types.prompt_associated_projects_item import PromptAssociatedProjectsItem +from ..types.prompt_created_by import PromptCreatedBy +from ..types.prompt_organization import PromptOrganization +from .types.prompts_batch_failed_predictions_request_failed_predictions_item import ( + PromptsBatchFailedPredictionsRequestFailedPredictionsItem, +) +from .types.prompts_batch_failed_predictions_response import PromptsBatchFailedPredictionsResponse +from .types.prompts_batch_predictions_request_results_item import PromptsBatchPredictionsRequestResultsItem +from .types.prompts_batch_predictions_response import PromptsBatchPredictionsResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawPromptsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[Prompt]]: + """ + Get a list of prompts. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[Prompt]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prompt]: + """ + Create a new prompt. 
+ + Parameters + ---------- + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prompt] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return 
HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Prompt]: + """ + Get a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prompt] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Prompt]: + """ + Update a prompt by ID. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Prompt] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + 
object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def batch_predictions( + self, + *, + num_predictions: typing.Optional[int] = None, + modelrun_id: typing.Optional[int] = OMIT, + results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptsBatchPredictionsResponse]: + """ + Create a new batch prediction. + + Parameters + ---------- + num_predictions : typing.Optional[int] + Number of predictions being sent + + modelrun_id : typing.Optional[int] + Model Run ID to associate the prediction with + + results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PromptsBatchPredictionsResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + params={ + "num_predictions": num_predictions, + }, + json={ + "modelrun_id": modelrun_id, + "results": convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def batch_failed_predictions( + self, + *, + num_failed_predictions: typing.Optional[int] = None, + modelrun_id: typing.Optional[int] = OMIT, + failed_predictions: typing.Optional[ + typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] + ] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptsBatchFailedPredictionsResponse]: + """ + Create a new batch of failed predictions. + + Parameters + ---------- + num_failed_predictions : typing.Optional[int] + Number of failed predictions being sent + + modelrun_id : typing.Optional[int] + Model Run ID where the failed predictions came from + + failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PromptsBatchFailedPredictionsResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/model-run/batch-failed-predictions", + method="POST", + params={ + "num_failed_predictions": num_failed_predictions, + }, + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawPromptsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Prompt]]: + """ + Get a list of prompts. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Prompt]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Prompt], + parse_obj_as( + type_=typing.List[Prompt], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prompt]: + """ + Create a new prompt. 
+ + Parameters + ---------- + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/prompts/", + method="POST", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return 
AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Prompt]: + """ + Get a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a prompt by ID. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: str, + input_fields: typing.Sequence[str], + output_classes: typing.Sequence[str], + description: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptOrganization] = OMIT, + associated_projects: typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] = OMIT, + skill_name: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Prompt]: + """ + Update a prompt by ID. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : str + Title of the prompt + + input_fields : typing.Sequence[str] + List of input fields + + output_classes : typing.Sequence[str] + List of output classes + + description : typing.Optional[str] + Description of the prompt + + created_by : typing.Optional[PromptCreatedBy] + User ID of the creator of the prompt + + created_at : typing.Optional[dt.datetime] + Date and time the prompt was created + + updated_at : typing.Optional[dt.datetime] + Date and time the prompt was last updated + + organization : typing.Optional[PromptOrganization] + Organization ID of the prompt + + associated_projects : typing.Optional[typing.Sequence[PromptAssociatedProjectsItem]] + List of associated projects IDs or objects + + skill_name : typing.Optional[str] + Name of the skill + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[Prompt] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptOrganization, direction="write" + ), + "input_fields": input_fields, + "output_classes": output_classes, + "associated_projects": convert_and_respect_annotation_metadata( + object_=associated_projects, + annotation=typing.Sequence[PromptAssociatedProjectsItem], + direction="write", + ), + "skill_name": skill_name, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Prompt, + parse_obj_as( + type_=Prompt, # type: ignore + 
object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def batch_predictions( + self, + *, + num_predictions: typing.Optional[int] = None, + modelrun_id: typing.Optional[int] = OMIT, + results: typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptsBatchPredictionsResponse]: + """ + Create a new batch prediction. + + Parameters + ---------- + num_predictions : typing.Optional[int] + Number of predictions being sent + + modelrun_id : typing.Optional[int] + Model Run ID to associate the prediction with + + results : typing.Optional[typing.Sequence[PromptsBatchPredictionsRequestResultsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PromptsBatchPredictionsResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-run/batch-predictions", + method="POST", + params={ + "num_predictions": num_predictions, + }, + json={ + "modelrun_id": modelrun_id, + "results": convert_and_respect_annotation_metadata( + object_=results, + annotation=typing.Sequence[PromptsBatchPredictionsRequestResultsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchPredictionsResponse, + parse_obj_as( + type_=PromptsBatchPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def batch_failed_predictions( + self, + *, + num_failed_predictions: typing.Optional[int] = None, + modelrun_id: typing.Optional[int] = OMIT, + failed_predictions: typing.Optional[ + typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem] + ] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptsBatchFailedPredictionsResponse]: + """ + Create a new batch of failed predictions. + + Parameters + ---------- + num_failed_predictions : typing.Optional[int] + Number of failed predictions being sent + + modelrun_id : typing.Optional[int] + Model Run ID where the failed predictions came from + + failed_predictions : typing.Optional[typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem]] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[PromptsBatchFailedPredictionsResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/model-run/batch-failed-predictions", + method="POST", + params={ + "num_failed_predictions": num_failed_predictions, + }, + json={ + "modelrun_id": modelrun_id, + "failed_predictions": convert_and_respect_annotation_metadata( + object_=failed_predictions, + annotation=typing.Sequence[PromptsBatchFailedPredictionsRequestFailedPredictionsItem], + direction="write", + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptsBatchFailedPredictionsResponse, + parse_obj_as( + type_=PromptsBatchFailedPredictionsResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/prompts/runs/__init__.py b/src/label_studio_sdk/prompts/runs/__init__.py index b92e5ed7e..549df0e78 100644 --- a/src/label_studio_sdk/prompts/runs/__init__.py +++ b/src/label_studio_sdk/prompts/runs/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import RunsListRequestProjectSubset __all__ = ["RunsListRequestProjectSubset"] diff --git a/src/label_studio_sdk/prompts/runs/client.py b/src/label_studio_sdk/prompts/runs/client.py index f0edce47e..4adb7cf23 100644 --- a/src/label_studio_sdk/prompts/runs/client.py +++ b/src/label_studio_sdk/prompts/runs/client.py @@ -1,21 +1,17 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ...core.client_wrapper import SyncClientWrapper -from .types.runs_list_request_project_subset import RunsListRequestProjectSubset + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions from ...types.inference_run import InferenceRun -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...types.inference_run_project_subset import InferenceRunProjectSubset -from ...types.inference_run_organization import InferenceRunOrganization from ...types.inference_run_created_by import InferenceRunCreatedBy +from ...types.inference_run_organization import InferenceRunOrganization +from ...types.inference_run_project_subset import InferenceRunProjectSubset from ...types.inference_run_status import InferenceRunStatus -import datetime as dt -from ...core.serialization import convert_and_respect_annotation_metadata -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawRunsClient, RawRunsClient +from .types.runs_list_request_project_subset import RunsListRequestProjectSubset # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,7 +19,18 @@ class RunsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawRunsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawRunsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawRunsClient + """ + return self._raw_client def list( self, @@ -73,28 +80,10 @@ def list( project_subset="All", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="GET", - params={ - "project": project, - "project_subset": project_subset, - }, - request_options=request_options, + _response = self._raw_client.list( + id, version_id, project=project, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def create( self, @@ -169,51 +158,39 @@ def create( project_subset="All", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="POST", - json={ - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=InferenceRunOrganization, - direction="write", - ), - "project": project, - "model_version": model_version, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=InferenceRunCreatedBy, - direction="write", - ), - "project_subset": project_subset, - "status": status, - "job_id": job_id, - "created_at": created_at, - "triggered_at": triggered_at, - "predictions_updated_at": predictions_updated_at, - "completed_at": completed_at, - }, + _response = self._raw_client.create( + id, + version_id, + project=project, + project_subset=project_subset, + organization=organization, + model_version=model_version, + created_by=created_by, + 
status=status, + job_id=job_id, + created_at=created_at, + triggered_at=triggered_at, + predictions_updated_at=predictions_updated_at, + completed_at=completed_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncRunsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawRunsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawRunsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawRunsClient + """ + return self._raw_client async def list( self, @@ -271,28 +248,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="GET", - params={ - "project": project, - "project_subset": project_subset, - }, - request_options=request_options, + _response = await self._raw_client.list( + id, version_id, project=project, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def create( self, @@ 
-375,43 +334,20 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", - method="POST", - json={ - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=InferenceRunOrganization, - direction="write", - ), - "project": project, - "model_version": model_version, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=InferenceRunCreatedBy, - direction="write", - ), - "project_subset": project_subset, - "status": status, - "job_id": job_id, - "created_at": created_at, - "triggered_at": triggered_at, - "predictions_updated_at": predictions_updated_at, - "completed_at": completed_at, - }, + _response = await self._raw_client.create( + id, + version_id, + project=project, + project_subset=project_subset, + organization=organization, + model_version=model_version, + created_by=created_by, + status=status, + job_id=job_id, + created_at=created_at, + triggered_at=triggered_at, + predictions_updated_at=predictions_updated_at, + completed_at=completed_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRun, - parse_obj_as( - type_=InferenceRun, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/prompts/runs/raw_client.py b/src/label_studio_sdk/prompts/runs/raw_client.py new file mode 100644 index 000000000..e2c694d33 --- /dev/null +++ b/src/label_studio_sdk/prompts/runs/raw_client.py @@ -0,0 +1,348 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.inference_run import InferenceRun +from ...types.inference_run_created_by import InferenceRunCreatedBy +from ...types.inference_run_organization import InferenceRunOrganization +from ...types.inference_run_project_subset import InferenceRunProjectSubset +from ...types.inference_run_status import InferenceRunStatus +from .types.runs_list_request_project_subset import RunsListRequestProjectSubset + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawRunsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: RunsListRequestProjectSubset, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[InferenceRun]: + """ + Get information (status, metadata, etc) about an existing inference run + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + The ID of the project that this Inference Run makes predictions on + + project_subset : RunsListRequestProjectSubset + Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[InferenceRun] + Success + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="GET", + params={ + "project": project, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: InferenceRunProjectSubset, + organization: typing.Optional[InferenceRunOrganization] = OMIT, + model_version: typing.Optional[int] = OMIT, + created_by: typing.Optional[InferenceRunCreatedBy] = OMIT, + status: typing.Optional[InferenceRunStatus] = OMIT, + job_id: typing.Optional[str] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + triggered_at: typing.Optional[dt.datetime] = OMIT, + predictions_updated_at: typing.Optional[dt.datetime] = OMIT, + completed_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[InferenceRun]: + """ + Run a prompt inference. 
+ + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + + project_subset : InferenceRunProjectSubset + + organization : typing.Optional[InferenceRunOrganization] + + model_version : typing.Optional[int] + + created_by : typing.Optional[InferenceRunCreatedBy] + + status : typing.Optional[InferenceRunStatus] + + job_id : typing.Optional[str] + + created_at : typing.Optional[dt.datetime] + + triggered_at : typing.Optional[dt.datetime] + + predictions_updated_at : typing.Optional[dt.datetime] + + completed_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[InferenceRun] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=InferenceRunOrganization, direction="write" + ), + "project": project, + "model_version": model_version, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=InferenceRunCreatedBy, direction="write" + ), + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise 
ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawRunsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: RunsListRequestProjectSubset, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[InferenceRun]: + """ + Get information (status, metadata, etc) about an existing inference run + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + The ID of the project that this Inference Run makes predictions on + + project_subset : RunsListRequestProjectSubset + Defines which tasks are operated on (e.g. HasGT will only operate on tasks with a ground truth annotation, but All will operate on all records) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[InferenceRun] + Success + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="GET", + params={ + "project": project, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + id: int, + version_id: int, + *, + project: int, + project_subset: InferenceRunProjectSubset, 
organization: typing.Optional[InferenceRunOrganization] = OMIT, + model_version: typing.Optional[int] = OMIT, + created_by: typing.Optional[InferenceRunCreatedBy] = OMIT, + status: typing.Optional[InferenceRunStatus] = OMIT, + job_id: typing.Optional[str] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + triggered_at: typing.Optional[dt.datetime] = OMIT, + predictions_updated_at: typing.Optional[dt.datetime] = OMIT, + completed_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[InferenceRun]: + """ + Run a prompt inference. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + project : int + + project_subset : InferenceRunProjectSubset + + organization : typing.Optional[InferenceRunOrganization] + + model_version : typing.Optional[int] + + created_by : typing.Optional[InferenceRunCreatedBy] + + status : typing.Optional[InferenceRunStatus] + + job_id : typing.Optional[str] + + created_at : typing.Optional[dt.datetime] + + triggered_at : typing.Optional[dt.datetime] + + predictions_updated_at : typing.Optional[dt.datetime] + + completed_at : typing.Optional[dt.datetime] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[InferenceRun] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}/inference-runs", + method="POST", + json={ + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=InferenceRunOrganization, direction="write" + ), + "project": project, + "model_version": model_version, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=InferenceRunCreatedBy, direction="write" + ), + "project_subset": project_subset, + "status": status, + "job_id": job_id, + "created_at": created_at, + "triggered_at": triggered_at, + "predictions_updated_at": predictions_updated_at, + "completed_at": completed_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRun, + parse_obj_as( + type_=InferenceRun, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/prompts/runs/types/__init__.py b/src/label_studio_sdk/prompts/runs/types/__init__.py index 81dbca787..8b1f7f214 100644 --- a/src/label_studio_sdk/prompts/runs/types/__init__.py +++ b/src/label_studio_sdk/prompts/runs/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .runs_list_request_project_subset import RunsListRequestProjectSubset __all__ = ["RunsListRequestProjectSubset"] diff --git a/src/label_studio_sdk/prompts/types/__init__.py b/src/label_studio_sdk/prompts/types/__init__.py index aa63b5ae5..cd753f61e 100644 --- a/src/label_studio_sdk/prompts/types/__init__.py +++ b/src/label_studio_sdk/prompts/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .prompts_batch_failed_predictions_request_failed_predictions_item import ( PromptsBatchFailedPredictionsRequestFailedPredictionsItem, ) diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py index f58cf15b8..87c9cbf41 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchFailedPredictionsRequestFailedPredictionsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py index 210085456..ad4d5a758 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_failed_predictions_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchFailedPredictionsResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py index d46f78c4d..43c268c94 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchPredictionsRequestResultsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py index befabdace..30463c2be 100644 --- a/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py +++ b/src/label_studio_sdk/prompts/types/prompts_batch_predictions_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptsBatchPredictionsResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/prompts/versions/__init__.py b/src/label_studio_sdk/prompts/versions/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/prompts/versions/__init__.py +++ b/src/label_studio_sdk/prompts/versions/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/prompts/versions/client.py b/src/label_studio_sdk/prompts/versions/client.py index a5b1cd3a4..7baf50d7c 100644 --- a/src/label_studio_sdk/prompts/versions/client.py +++ b/src/label_studio_sdk/prompts/versions/client.py @@ -1,21 +1,17 @@ # This file was auto-generated by Fern from our API Definition. +import datetime as dt import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions +from ...types.inference_run_cost_estimate import InferenceRunCostEstimate from ...types.prompt_version import PromptVersion -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError -from ...types.prompt_version_provider import PromptVersionProvider from ...types.prompt_version_created_by import PromptVersionCreatedBy -import datetime as dt from ...types.prompt_version_organization import PromptVersionOrganization -from ...core.serialization import convert_and_respect_annotation_metadata -from ...types.inference_run_cost_estimate import InferenceRunCostEstimate +from ...types.prompt_version_provider import PromptVersionProvider from 
...types.refined_prompt_response import RefinedPromptResponse -from ...core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawVersionsClient, RawVersionsClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,7 +19,18 @@ class VersionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawVersionsClient + """ + return self._raw_client def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[PromptVersion]: """ @@ -53,24 +60,8 @@ def list(self, id: int, *, request_options: typing.Optional[RequestOptions] = No id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[PromptVersion], - parse_obj_as( - type_=typing.List[PromptVersion], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, request_options=request_options) + return _response.data def create( self, @@ -135,52 +126,24 @@ def create( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="POST", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": 
provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=PromptVersionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptVersionOrganization, - direction="write", - ), - }, + _response = self._raw_client.create( + id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get( - self, - id: int, - version_id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> PromptVersion: """ Get a prompt version by ID. 
@@ -213,32 +176,10 @@ def get( version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def delete( - self, - id: int, - version_id: int, - *, - request_options: typing.Optional[RequestOptions] = None, - ) -> None: + _response = self._raw_client.get(id, version_id, request_options=request_options) + return _response.data + + def delete(self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ Delete a prompt version by ID. 
@@ -269,18 +210,8 @@ def delete( version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, version_id, request_options=request_options) + return _response.data def update( self, @@ -350,45 +281,22 @@ def update( version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="PATCH", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=PromptVersionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptVersionOrganization, - direction="write", - ), - }, + _response = self._raw_client.update( + id, + version_id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = 
_response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def cost_estimate( self, @@ -438,28 +346,10 @@ def cost_estimate( project_subset=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", - method="POST", - params={ - "project_id": project_id, - "project_subset": project_subset, - }, - request_options=request_options, + _response = self._raw_client.cost_estimate( + prompt_id, version_id, project_id=project_id, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRunCostEstimate, - parse_obj_as( - type_=InferenceRunCostEstimate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get_refined_prompt( self, @@ -504,27 +394,10 @@ def get_refined_prompt( refinement_job_id="refinement_job_id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="GET", - params={ - "refinement_job_id": refinement_job_id, - }, - request_options=request_options, + _response = self._raw_client.get_refined_prompt( + prompt_id, version_id, refinement_job_id=refinement_job_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise 
ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def refine_prompt( self, @@ -580,41 +453,32 @@ def refine_prompt( version_id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="POST", - params={ - "async": async_, - }, - json={ - "teacher_model_provider_connection_id": teacher_model_provider_connection_id, - "teacher_model_name": teacher_model_name, - "project_id": project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.refine_prompt( + prompt_id, + version_id, + async_=async_, + teacher_model_provider_connection_id=teacher_model_provider_connection_id, + teacher_model_name=teacher_model_name, + project_id=project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncVersionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawVersionsClient + """ + return self._raw_client async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -654,24 +518,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[PromptVersion], - parse_obj_as( - type_=typing.List[PromptVersion], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(id, request_options=request_options) + return _response.data async def create( self, @@ -744,52 +592,24 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions", - method="POST", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=PromptVersionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptVersionOrganization, - direction="write", - ), - }, + _response = await self._raw_client.create( + id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + 
updated_at=updated_at, + organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get( - self, - id: int, - version_id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> PromptVersion: """ Get a prompt version by ID. @@ -830,31 +650,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, version_id, request_options=request_options) + return _response.data async def delete( - self, - id: int, - version_id: int, - *, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None ) -> None: """ Delete a prompt version by ID. 
@@ -894,18 +694,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, version_id, request_options=request_options) + return _response.data async def update( self, @@ -983,45 +773,22 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", - method="PATCH", - json={ - "title": title, - "parent_model": parent_model, - "model_provider_connection": model_provider_connection, - "prompt": prompt, - "provider": provider, - "provider_model_id": provider_model_id, - "created_by": convert_and_respect_annotation_metadata( - object_=created_by, - annotation=PromptVersionCreatedBy, - direction="write", - ), - "created_at": created_at, - "updated_at": updated_at, - "organization": convert_and_respect_annotation_metadata( - object_=organization, - annotation=PromptVersionOrganization, - direction="write", - ), - }, + _response = await self._raw_client.update( + id, + version_id, + title=title, + parent_model=parent_model, + model_provider_connection=model_provider_connection, + prompt=prompt, + provider=provider, + provider_model_id=provider_model_id, + created_by=created_by, + created_at=created_at, + updated_at=updated_at, + organization=organization, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - PromptVersion, - parse_obj_as( - type_=PromptVersion, # type: 
ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def cost_estimate( self, @@ -1079,28 +846,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", - method="POST", - params={ - "project_id": project_id, - "project_subset": project_subset, - }, - request_options=request_options, + _response = await self._raw_client.cost_estimate( + prompt_id, version_id, project_id=project_id, project_subset=project_subset, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - InferenceRunCostEstimate, - parse_obj_as( - type_=InferenceRunCostEstimate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get_refined_prompt( self, @@ -1153,27 +902,10 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="GET", - params={ - "refinement_job_id": refinement_job_id, - }, - request_options=request_options, + _response = await self._raw_client.get_refined_prompt( + prompt_id, version_id, refinement_job_id=refinement_job_id, request_options=request_options ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - 
_response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def refine_prompt( self, @@ -1237,33 +969,13 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", - method="POST", - params={ - "async": async_, - }, - json={ - "teacher_model_provider_connection_id": teacher_model_provider_connection_id, - "teacher_model_name": teacher_model_name, - "project_id": project_id, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.refine_prompt( + prompt_id, + version_id, + async_=async_, + teacher_model_provider_connection_id=teacher_model_provider_connection_id, + teacher_model_name=teacher_model_name, + project_id=project_id, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RefinedPromptResponse, - parse_obj_as( - type_=RefinedPromptResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/prompts/versions/raw_client.py b/src/label_studio_sdk/prompts/versions/raw_client.py new file mode 100644 index 000000000..2892a6e79 --- /dev/null +++ b/src/label_studio_sdk/prompts/versions/raw_client.py @@ -0,0 +1,1008 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from ...core.serialization import convert_and_respect_annotation_metadata +from ...types.inference_run_cost_estimate import InferenceRunCostEstimate +from ...types.prompt_version import PromptVersion +from ...types.prompt_version_created_by import PromptVersionCreatedBy +from ...types.prompt_version_organization import PromptVersionOrganization +from ...types.prompt_version_provider import PromptVersionProvider +from ...types.refined_prompt_response import RefinedPromptResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawVersionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[PromptVersion]]: + """ + Get a list of prompt versions. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[PromptVersion]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptVersion]: + """ + Create a new version of a prompt. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PromptVersion] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: int, version_id: int, 
*, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[PromptVersion]: + """ + Get a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[PromptVersion] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete( + self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + Delete a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + version_id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[PromptVersion]: + """ + Update a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[PromptVersion] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="PATCH", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def cost_estimate( + self, + prompt_id: int, + version_id: int, + *, + project_id: int, + project_subset: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[InferenceRunCostEstimate]: + """ + Get cost estimate for running a prompt version on a particular project/subset + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + project_id : int + ID of the project to get an estimate for running on + + project_subset : int + Subset of the project to get an estimate for running on (e.g. 
'All', 'Sample', or 'HasGT') + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[InferenceRunCostEstimate] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", + method="POST", + params={ + "project_id": project_id, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get_refined_prompt( + self, + prompt_id: int, + version_id: int, + *, + refinement_job_id: str, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RefinedPromptResponse]: + """ + Get the refined prompt based on the `refinement_job_id`. + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + refinement_job_id : str + Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RefinedPromptResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="GET", + params={ + "refinement_job_id": refinement_job_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def refine_prompt( + self, + prompt_id: int, + version_id: int, + *, + async_: typing.Optional[bool] = None, + teacher_model_provider_connection_id: typing.Optional[int] = OMIT, + teacher_model_name: typing.Optional[str] = OMIT, + project_id: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[RefinedPromptResponse]: + """ + Refine a prompt version using a teacher model and save the refined prompt as a new version. + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Base Prompt Version ID + + async_ : typing.Optional[bool] + Run the refinement job asynchronously + + teacher_model_provider_connection_id : typing.Optional[int] + Model Provider Connection ID to use to refine the prompt + + teacher_model_name : typing.Optional[str] + Name of the model to use to refine the prompt + + project_id : typing.Optional[int] + Project ID to target the refined prompt for + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RefinedPromptResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="POST", + params={ + "async": async_, + }, + json={ + "teacher_model_provider_connection_id": teacher_model_provider_connection_id, + "teacher_model_name": teacher_model_name, + "project_id": project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawVersionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[PromptVersion]]: + """ + Get a list of prompt versions. + + Parameters + ---------- + id : int + Prompt ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[PromptVersion]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[PromptVersion], + parse_obj_as( + type_=typing.List[PromptVersion], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptVersion]: + """ + Create a new version of a prompt. 
+ + Parameters + ---------- + id : int + Prompt ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptVersion] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions", + method="POST", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: 
int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[PromptVersion]: + """ + Get a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptVersion] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a prompt version by ID. + + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + version_id: int, + *, + title: typing.Optional[str] = OMIT, + parent_model: typing.Optional[int] = OMIT, + model_provider_connection: typing.Optional[int] = OMIT, + prompt: typing.Optional[str] = OMIT, + provider: typing.Optional[PromptVersionProvider] = OMIT, + provider_model_id: typing.Optional[str] = OMIT, + created_by: typing.Optional[PromptVersionCreatedBy] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + organization: typing.Optional[PromptVersionOrganization] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[PromptVersion]: + """ + Update a prompt version by ID. 
+ + Parameters + ---------- + id : int + Prompt ID + + version_id : int + Prompt Version ID + + title : typing.Optional[str] + + parent_model : typing.Optional[int] + + model_provider_connection : typing.Optional[int] + + prompt : typing.Optional[str] + + provider : typing.Optional[PromptVersionProvider] + + provider_model_id : typing.Optional[str] + + created_by : typing.Optional[PromptVersionCreatedBy] + + created_at : typing.Optional[dt.datetime] + + updated_at : typing.Optional[dt.datetime] + + organization : typing.Optional[PromptVersionOrganization] + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[PromptVersion] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_id)}", + method="PATCH", + json={ + "title": title, + "parent_model": parent_model, + "model_provider_connection": model_provider_connection, + "prompt": prompt, + "provider": provider, + "provider_model_id": provider_model_id, + "created_by": convert_and_respect_annotation_metadata( + object_=created_by, annotation=PromptVersionCreatedBy, direction="write" + ), + "created_at": created_at, + "updated_at": updated_at, + "organization": convert_and_respect_annotation_metadata( + object_=organization, annotation=PromptVersionOrganization, direction="write" + ), + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + PromptVersion, + parse_obj_as( + type_=PromptVersion, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, 
headers=dict(_response.headers), body=_response_json) + + async def cost_estimate( + self, + prompt_id: int, + version_id: int, + *, + project_id: int, + project_subset: int, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[InferenceRunCostEstimate]: + """ + Get cost estimate for running a prompt version on a particular project/subset + + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + project_id : int + ID of the project to get an estimate for running on + + project_subset : int + Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT') + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[InferenceRunCostEstimate] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate", + method="POST", + params={ + "project_id": project_id, + "project_subset": project_subset, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + InferenceRunCostEstimate, + parse_obj_as( + type_=InferenceRunCostEstimate, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get_refined_prompt( + self, + prompt_id: int, + version_id: int, + *, + refinement_job_id: str, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RefinedPromptResponse]: + """ + Get the refined prompt based on the `refinement_job_id`. 
+ + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Prompt Version ID + + refinement_job_id : str + Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RefinedPromptResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="GET", + params={ + "refinement_job_id": refinement_job_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def refine_prompt( + self, + prompt_id: int, + version_id: int, + *, + async_: typing.Optional[bool] = None, + teacher_model_provider_connection_id: typing.Optional[int] = OMIT, + teacher_model_name: typing.Optional[str] = OMIT, + project_id: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[RefinedPromptResponse]: + """ + Refine a prompt version using a teacher model and save the refined prompt as a new version. 
+ + Parameters + ---------- + prompt_id : int + Prompt ID + + version_id : int + Base Prompt Version ID + + async_ : typing.Optional[bool] + Run the refinement job asynchronously + + teacher_model_provider_connection_id : typing.Optional[int] + Model Provider Connection ID to use to refine the prompt + + teacher_model_name : typing.Optional[str] + Name of the model to use to refine the prompt + + project_id : typing.Optional[int] + Project ID to target the refined prompt for + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[RefinedPromptResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/refine", + method="POST", + params={ + "async": async_, + }, + json={ + "teacher_model_provider_connection_id": teacher_model_provider_connection_id, + "teacher_model_name": teacher_model_name, + "project_id": project_id, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RefinedPromptResponse, + parse_obj_as( + type_=RefinedPromptResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/tasks/__init__.py b/src/label_studio_sdk/tasks/__init__.py index 06c13c543..f5f953fad 100644 --- a/src/label_studio_sdk/tasks/__init__.py +++ b/src/label_studio_sdk/tasks/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import TasksListRequestFields, TasksListResponse __all__ = ["TasksListRequestFields", "TasksListResponse"] diff --git a/src/label_studio_sdk/tasks/client.py b/src/label_studio_sdk/tasks/client.py index 797658b3e..1152d0300 100644 --- a/src/label_studio_sdk/tasks/client.py +++ b/src/label_studio_sdk/tasks/client.py @@ -1,21 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.pagination import AsyncPager, SyncPager from ..core.request_options import RequestOptions -from ..types.project_import import ProjectImport -from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.tasks_list_request_fields import TasksListRequestFields -from ..core.pagination import SyncPager -from ..types.task import Task -from .types.tasks_list_response import TasksListResponse from ..types.base_task import BaseTask from ..types.data_manager_task_serializer import DataManagerTaskSerializer -from ..core.client_wrapper import AsyncClientWrapper -from ..core.pagination import AsyncPager +from ..types.project_import import ProjectImport +from ..types.task import Task +from .raw_client import AsyncRawTasksClient, RawTasksClient +from .types.tasks_list_request_fields import TasksListRequestFields # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,14 +18,21 @@ class TasksClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawTasksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawTasksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawTasksClient + """ + return self._raw_client def create_many_status( - self, - id: int, - import_pk: str, - *, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> ProjectImport: """ @@ -69,24 +71,8 @@ def create_many_status( import_pk="import_pk", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectImport, - parse_obj_as( - type_=ProjectImport, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create_many_status(id, import_pk, request_options=request_options) + return _response.data def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -118,18 +104,8 @@ def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestO id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete_all_tasks(id, request_options=request_options) + return _response.data def list( self, @@ -209,51 +185,18 @@ def list( for page in response.iter_pages(): yield page """ - page = page if page is not None else 1 - _response = 
self._client_wrapper.httpx_client.request( - "api/tasks/", - method="GET", - params={ - "page": page, - "page_size": page_size, - "view": view, - "project": project, - "resolve_uri": resolve_uri, - "fields": fields, - "review": review, - "include": include, - "query": query, - }, + return self._raw_client.list( + page=page, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - TasksListResponse, - parse_obj_as( - type_=TasksListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - page=page + 1, - page_size=page_size, - view=view, - project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - _items = _parsed_response.tasks - return SyncPager(has_next=_has_next, items=_items, get_next=_get_next) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) def create( self, @@ -298,32 +241,8 @@ def create( project=1, ) """ - _response = self._client_wrapper.httpx_client.request( - "api/tasks/", - method="POST", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = 
self._raw_client.create(data=data, project=project, request_options=request_options) + return _response.data def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DataManagerTaskSerializer: """ @@ -355,24 +274,8 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - DataManagerTaskSerializer, - parse_obj_as( - type_=DataManagerTaskSerializer, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -406,18 +309,8 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -465,44 +358,27 @@ def update( project=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": data, - "project": project, - }, - headers={ - 
"content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data class AsyncTasksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawTasksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawTasksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawTasksClient + """ + return self._raw_client async def create_many_status( - self, - id: int, - import_pk: str, - *, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None ) -> ProjectImport: """ @@ -549,24 +425,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ProjectImport, - parse_obj_as( - type_=ProjectImport, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create_many_status(id, import_pk, 
request_options=request_options) + return _response.data async def delete_all_tasks(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -606,18 +466,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/projects/{jsonable_encoder(id)}/tasks/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete_all_tasks(id, request_options=request_options) + return _response.data async def list( self, @@ -698,6 +548,7 @@ async def main() -> None: response = await client.tasks.list() async for item in response: yield item + # alternatively, you can paginate page-by-page async for page in response.iter_pages(): yield page @@ -705,51 +556,18 @@ async def main() -> None: asyncio.run(main()) """ - page = page if page is not None else 1 - _response = await self._client_wrapper.httpx_client.request( - "api/tasks/", - method="GET", - params={ - "page": page, - "page_size": page_size, - "view": view, - "project": project, - "resolve_uri": resolve_uri, - "fields": fields, - "review": review, - "include": include, - "query": query, - }, + return await self._raw_client.list( + page=page, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, request_options=request_options, ) - try: - if 200 <= _response.status_code < 300: - _parsed_response = typing.cast( - TasksListResponse, - parse_obj_as( - type_=TasksListResponse, # type: ignore - object_=_response.json(), - ), - ) - _has_next = True - _get_next = lambda: self.list( - page=page + 1, - page_size=page_size, - view=view, - 
project=project, - resolve_uri=resolve_uri, - fields=fields, - review=review, - include=include, - query=query, - request_options=request_options, - ) - _items = _parsed_response.tasks - return AsyncPager(has_next=_has_next, items=_items, get_next=_get_next) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) async def create( self, @@ -805,32 +623,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/tasks/", - method="POST", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create(data=data, project=project, request_options=request_options) + return _response.data async def get( self, id: str, *, request_options: typing.Optional[RequestOptions] = None @@ -872,24 +666,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - DataManagerTaskSerializer, - parse_obj_as( - type_=DataManagerTaskSerializer, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -931,18 +709,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -1001,29 +769,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/tasks/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": data, - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseTask, - parse_obj_as( - type_=BaseTask, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/tasks/raw_client.py b/src/label_studio_sdk/tasks/raw_client.py new file mode 100644 index 000000000..152ab9860 --- /dev/null +++ b/src/label_studio_sdk/tasks/raw_client.py @@ -0,0 +1,816 @@ +# This file was 
auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pagination import AsyncPager, BaseHttpResponse, SyncPager +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.base_task import BaseTask +from ..types.data_manager_task_serializer import DataManagerTaskSerializer +from ..types.project_import import ProjectImport +from ..types.task import Task +from .types.tasks_list_request_fields import TasksListRequestFields +from .types.tasks_list_response import TasksListResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawTasksClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def create_many_status( + self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ProjectImport]: + """ + + Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. + + You will need the project ID and the unique ID of the import operation. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + The import ID is returned as part of the response when you call [Import tasks](import-tasks). + + Parameters + ---------- + id : int + The project ID. + + import_pk : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[ProjectImport] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete_all_tasks( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete all tasks from a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list( + self, + *, + page: typing.Optional[int] = None, + page_size: typing.Optional[int] = None, + view: typing.Optional[int] = None, + project: typing.Optional[int] = None, + resolve_uri: typing.Optional[bool] = None, + fields: typing.Optional[TasksListRequestFields] = None, + review: typing.Optional[bool] = None, + include: typing.Optional[str] = None, + query: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> SyncPager[Task]: + """ + + Retrieve a list of tasks. + + You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). + + Parameters + ---------- + page : typing.Optional[int] + A page number within the paginated result set. + + page_size : typing.Optional[int] + Number of results to return per page. 
+ + view : typing.Optional[int] + View ID + + project : typing.Optional[int] + Project ID + + resolve_uri : typing.Optional[bool] + Resolve task data URIs using Cloud Storage + + fields : typing.Optional[TasksListRequestFields] + Set to "all" if you want to include annotations and predictions in the response + + review : typing.Optional[bool] + Get tasks for review + + include : typing.Optional[str] + Specify which fields to include in the response + + query : typing.Optional[str] + Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. + + * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
+ Example: `["completed_at"]` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SyncPager[Task] + List of Tasks + """ + page = page if page is not None else 1 + + _response = self._client_wrapper.httpx_client.request( + "api/tasks/", + method="GET", + params={ + "page": page, + "page_size": page_size, + "view": view, + "project": project, + "resolve_uri": resolve_uri, + "fields": fields, + "review": review, + "include": include, + "query": query, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) + _items = _parsed_response.tasks + _has_next = True + _get_next = lambda: self.list( + page=page + 1, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + return SyncPager( + has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseTask]: + """ + + Create a new labeling task in Label Studio. + + The data you provide depends on your labeling config and data type. + + You will also need to provide a project ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseTask] + Created task + """ + _response = self._client_wrapper.httpx_client.request( + "api/tasks/", + method="POST", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[DataManagerTaskSerializer]: + """ + + Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. + The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[DataManagerTaskSerializer] + Task + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a task in Label Studio. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + This action cannot be undone. + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: str, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseTask]: + """ + + Update the attributes of an existing labeling task. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[BaseTask] + Updated task + """ + _response = self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawTasksClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def create_many_status( + self, id: int, import_pk: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ProjectImport]: + """ + + Get information about an async project import operation. This can be especially useful to monitor status, as large import jobs can take time. + + You will need the project ID and the unique ID of the import operation. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + The import ID is returned as part of the response when you call [Import tasks](import-tasks). + + Parameters + ---------- + id : int + The project ID. + + import_pk : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ProjectImport] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/imports/{jsonable_encoder(import_pk)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ProjectImport, + parse_obj_as( + type_=ProjectImport, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete_all_tasks( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete all tasks from a specific project. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + id : int + A unique integer value identifying this project. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/projects/{jsonable_encoder(id)}/tasks/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, + *, + page: typing.Optional[int] = None, + page_size: typing.Optional[int] = None, + view: typing.Optional[int] = None, + project: typing.Optional[int] = None, + resolve_uri: typing.Optional[bool] = None, + fields: typing.Optional[TasksListRequestFields] = None, + review: typing.Optional[bool] = None, + include: typing.Optional[str] = None, + query: typing.Optional[str] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncPager[Task]: + """ + + Retrieve a list of tasks. + + You can use the query parameters to filter the list by project and/or view (a tab within the Data Manager). You can also optionally add pagination to make the response easier to parse. + + The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). The view ID can be found using [List views](../views/list). + + Parameters + ---------- + page : typing.Optional[int] + A page number within the paginated result set. + + page_size : typing.Optional[int] + Number of results to return per page. 
+ + view : typing.Optional[int] + View ID + + project : typing.Optional[int] + Project ID + + resolve_uri : typing.Optional[bool] + Resolve task data URIs using Cloud Storage + + fields : typing.Optional[TasksListRequestFields] + Set to "all" if you want to include annotations and predictions in the response + + review : typing.Optional[bool] + Get tasks for review + + include : typing.Optional[str] + Specify which fields to include in the response + + query : typing.Optional[str] + Additional query to filter tasks. It must be JSON encoded string of dict containing one of the following parameters: `{"filters": ..., "selectedItems": ..., "ordering": ...}`. Check [Data Manager > Create View > see `data` field](#tag/Data-Manager/operation/api_dm_views_create) for more details about filters, selectedItems and ordering. + + * **filters**: dict with `"conjunction"` string (`"or"` or `"and"`) and list of filters in `"items"` array. Each filter is a dictionary with keys: `"filter"`, `"operator"`, `"type"`, `"value"`. [Read more about available filters](https://labelstud.io/sdk/data_manager.html)
Example: `{"conjunction": "or", "items": [{"filter": "filter:tasks:completed_at", "operator": "greater", "type": "Datetime", "value": "2021-01-01T00:00:00.000Z"}]}` + * **selectedItems**: dictionary with keys: `"all"`, `"included"`, `"excluded"`. If "all" is `false`, `"included"` must be used. If "all" is `true`, `"excluded"` must be used.
Examples: `{"all": false, "included": [1, 2, 3]}` or `{"all": true, "excluded": [4, 5]}` + * **ordering**: list of fields to order by. Currently, ordering is supported by only one parameter.
+ Example: `["completed_at"]` + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncPager[Task] + List of Tasks + """ + page = page if page is not None else 1 + + _response = await self._client_wrapper.httpx_client.request( + "api/tasks/", + method="GET", + params={ + "page": page, + "page_size": page_size, + "view": view, + "project": project, + "resolve_uri": resolve_uri, + "fields": fields, + "review": review, + "include": include, + "query": query, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _parsed_response = typing.cast( + TasksListResponse, + parse_obj_as( + type_=TasksListResponse, # type: ignore + object_=_response.json(), + ), + ) + _items = _parsed_response.tasks + _has_next = True + + async def _get_next(): + return await self.list( + page=page + 1, + page_size=page_size, + view=view, + project=project, + resolve_uri=resolve_uri, + fields=fields, + review=review, + include=include, + query=query, + request_options=request_options, + ) + + return AsyncPager( + has_next=_has_next, items=_items, get_next=_get_next, response=BaseHttpResponse(response=_response) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseTask]: + """ + + Create a new labeling task in Label Studio. + + The data you provide depends on your labeling config and data type. + + You will also need to provide a project ID. 
The project ID can be found in the URL when viewing the project in Label Studio, or you can retrieve all project IDs using [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseTask] + Created task + """ + _response = await self._client_wrapper.httpx_client.request( + "api/tasks/", + method="POST", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[DataManagerTaskSerializer]: + """ + + Get task data, metadata, annotations and other attributes for a specific labeling task by task ID. + The task ID is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[DataManagerTaskSerializer] + Task + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + DataManagerTaskSerializer, + parse_obj_as( + type_=DataManagerTaskSerializer, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a task in Label Studio. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + This action cannot be undone. + + Parameters + ---------- + id : str + Task ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: str, + *, + data: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseTask]: + """ + + Update the attributes of an existing labeling task. + + You will need the task ID. This is available from the Label Studio URL when viewing the task, or you can retrieve it programmatically with [Get task list](list). + + Parameters + ---------- + id : str + Task ID + + data : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Task data dictionary with arbitrary keys and values + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[BaseTask] + Updated task + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/tasks/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": data, + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseTask, + parse_obj_as( + type_=BaseTask, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/tasks/types/__init__.py b/src/label_studio_sdk/tasks/types/__init__.py index bba1de8a3..436b4e548 100644 --- a/src/label_studio_sdk/tasks/types/__init__.py +++ b/src/label_studio_sdk/tasks/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .tasks_list_request_fields import TasksListRequestFields from .tasks_list_response import TasksListResponse diff --git a/src/label_studio_sdk/tasks/types/tasks_list_response.py b/src/label_studio_sdk/tasks/types/tasks_list_response.py index c8d9e0240..00d717374 100644 --- a/src/label_studio_sdk/tasks/types/tasks_list_response.py +++ b/src/label_studio_sdk/tasks/types/tasks_list_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from ...types.task import Task + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from ...types.task import Task class TasksListResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/tokens/__init__.py b/src/label_studio_sdk/tokens/__init__.py index f3ea2659b..5cde0202d 100644 --- a/src/label_studio_sdk/tokens/__init__.py +++ b/src/label_studio_sdk/tokens/__init__.py @@ -1,2 +1,4 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + diff --git a/src/label_studio_sdk/tokens/client.py b/src/label_studio_sdk/tokens/client.py index 4b0308939..0191ade07 100644 --- a/src/label_studio_sdk/tokens/client.py +++ b/src/label_studio_sdk/tokens/client.py @@ -1,18 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from ..errors.not_found_error import NotFoundError -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..types.api_token_response import ApiTokenResponse from ..types.access_token_response import AccessTokenResponse -from ..errors.unauthorized_error import UnauthorizedError +from ..types.api_token_response import ApiTokenResponse from ..types.rotate_token_response import RotateTokenResponse -from ..errors.bad_request_error import BadRequestError -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawTokensClient, RawTokensClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -20,7 +15,18 @@ class TokensClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawTokensClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawTokensClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawTokensClient + """ + return self._raw_client def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -49,35 +55,8 @@ def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOpt refresh="refresh", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/token/blacklist", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.blacklist(refresh=refresh, request_options=request_options) + return _response.data def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ApiTokenResponse]: """ @@ -102,24 +81,8 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typ ) client.tokens.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ApiTokenResponse], - parse_obj_as( - 
type_=typing.List[ApiTokenResponse], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(request_options=request_options) + return _response.data def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> ApiTokenResponse: """ @@ -144,24 +107,8 @@ def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> ) client.tokens.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/token", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ApiTokenResponse, - parse_obj_as( - type_=ApiTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create(request_options=request_options) + return _response.data def refresh(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> AccessTokenResponse: """ @@ -191,41 +138,8 @@ def refresh(self, *, refresh: str, request_options: typing.Optional[RequestOptio refresh="refresh", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/token/refresh", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AccessTokenResponse, - parse_obj_as( - type_=AccessTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 401: - raise UnauthorizedError( - typing.cast( - 
typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.refresh(refresh=refresh, request_options=request_options) + return _response.data def rotate(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> RotateTokenResponse: """ @@ -255,46 +169,24 @@ def rotate(self, *, refresh: str, request_options: typing.Optional[RequestOption refresh="refresh", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/token/rotate", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RotateTokenResponse, - parse_obj_as( - type_=RotateTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.rotate(refresh=refresh, request_options=request_options) + return _response.data class AsyncTokensClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawTokensClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawTokensClient: + """ + Retrieves a raw implementation of this client that 
returns raw responses. + + Returns + ------- + AsyncRawTokensClient + """ + return self._raw_client async def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -331,35 +223,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/blacklist", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - if _response.status_code == 404: - raise NotFoundError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.blacklist(refresh=refresh, request_options=request_options) + return _response.data async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[ApiTokenResponse]: """ @@ -392,24 +257,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[ApiTokenResponse], - parse_obj_as( - type_=typing.List[ApiTokenResponse], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(request_options=request_options) + return _response.data async def create(self, *, 
request_options: typing.Optional[RequestOptions] = None) -> ApiTokenResponse: """ @@ -442,24 +291,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - ApiTokenResponse, - parse_obj_as( - type_=ApiTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create(request_options=request_options) + return _response.data async def refresh( self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None @@ -499,41 +332,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/refresh", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - AccessTokenResponse, - parse_obj_as( - type_=AccessTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 401: - raise UnauthorizedError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.refresh(refresh=refresh, request_options=request_options) + return _response.data async def rotate( self, *, refresh: str, request_options: 
typing.Optional[RequestOptions] = None @@ -573,38 +373,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/token/rotate", - method="POST", - json={ - "refresh": refresh, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - RotateTokenResponse, - parse_obj_as( - type_=RotateTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - if _response.status_code == 400: - raise BadRequestError( - typing.cast( - typing.Optional[typing.Any], - parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore - object_=_response.json(), - ), - ) - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.rotate(refresh=refresh, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/tokens/raw_client.py b/src/label_studio_sdk/tokens/raw_client.py new file mode 100644 index 000000000..a68f07754 --- /dev/null +++ b/src/label_studio_sdk/tokens/raw_client.py @@ -0,0 +1,495 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..errors.bad_request_error import BadRequestError +from ..errors.not_found_error import NotFoundError +from ..errors.unauthorized_error import UnauthorizedError +from ..types.access_token_response import AccessTokenResponse +from ..types.api_token_response import ApiTokenResponse +from ..types.rotate_token_response import RotateTokenResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawTokensClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def blacklist(self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Blacklist a refresh token to prevent its future use. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/token/blacklist", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[ApiTokenResponse]]: + """ + List all API tokens for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[ApiTokenResponse]] + List of API tokens retrieved successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ApiTokenResponse], + parse_obj_as( + type_=typing.List[ApiTokenResponse], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[ApiTokenResponse]: + """ + Create a new API token for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ApiTokenResponse] + Token created successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/token", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiTokenResponse, + parse_obj_as( + type_=ApiTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def refresh( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[AccessTokenResponse]: + """ + Get a new access token, using a refresh token. 
+ + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[AccessTokenResponse] + New access token created successfully + """ + _response = self._client_wrapper.httpx_client.request( + "api/token/refresh", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AccessTokenResponse, + parse_obj_as( + type_=AccessTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def rotate( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[RotateTokenResponse]: + """ + Blacklist existing refresh token, and get a new refresh token. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[RotateTokenResponse] + Refresh token successfully rotated + """ + _response = self._client_wrapper.httpx_client.request( + "api/token/rotate", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RotateTokenResponse, + parse_obj_as( + type_=RotateTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawTokensClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def blacklist( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Blacklist a refresh token to prevent its future use. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token/blacklist", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + if _response.status_code == 404: + raise NotFoundError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[ApiTokenResponse]]: + """ + List all API tokens for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[ApiTokenResponse]] + List of API tokens retrieved successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[ApiTokenResponse], + parse_obj_as( + type_=typing.List[ApiTokenResponse], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ApiTokenResponse]: + """ + Create a new API token for the current user. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[ApiTokenResponse] + Token created successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ApiTokenResponse, + parse_obj_as( + type_=ApiTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def refresh( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[AccessTokenResponse]: + """ + Get a new access token, using a refresh token. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[AccessTokenResponse] + New access token created successfully + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token/refresh", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + AccessTokenResponse, + parse_obj_as( + type_=AccessTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 401: + raise UnauthorizedError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def rotate( + self, *, refresh: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[RotateTokenResponse]: + """ + Blacklist existing refresh token, and get a new refresh token. + + Parameters + ---------- + refresh : str + JWT refresh token + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[RotateTokenResponse] + Refresh token successfully rotated + """ + _response = await self._client_wrapper.httpx_client.request( + "api/token/rotate", + method="POST", + json={ + "refresh": refresh, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + RotateTokenResponse, + parse_obj_as( + type_=RotateTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + if _response.status_code == 400: + raise BadRequestError( + headers=dict(_response.headers), + body=typing.cast( + typing.Optional[typing.Any], + parse_obj_as( + type_=typing.Optional[typing.Any], # type: ignore + object_=_response.json(), + ), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/types/__init__.py b/src/label_studio_sdk/types/__init__.py index 6fe7e2249..248085551 100644 --- a/src/label_studio_sdk/types/__init__.py +++ b/src/label_studio_sdk/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .access_token_response import AccessTokenResponse from .annotation import Annotation from .annotation_completed_by import AnnotationCompletedBy @@ -22,8 +24,10 @@ from .converted_format_status import ConvertedFormatStatus from .data_manager_task_serializer import DataManagerTaskSerializer from .data_manager_task_serializer_annotators_item import DataManagerTaskSerializerAnnotatorsItem +from .data_manager_task_serializer_comment_authors_item import DataManagerTaskSerializerCommentAuthorsItem from .data_manager_task_serializer_drafts_item import DataManagerTaskSerializerDraftsItem from .data_manager_task_serializer_predictions_item import DataManagerTaskSerializerPredictionsItem +from .data_manager_task_serializer_predictions_item_model_run import DataManagerTaskSerializerPredictionsItemModelRun from .export import Export from .export_format import ExportFormat from .export_snapshot import ExportSnapshot @@ -130,8 +134,10 @@ "ConvertedFormatStatus", "DataManagerTaskSerializer", "DataManagerTaskSerializerAnnotatorsItem", + "DataManagerTaskSerializerCommentAuthorsItem", "DataManagerTaskSerializerDraftsItem", "DataManagerTaskSerializerPredictionsItem", + "DataManagerTaskSerializerPredictionsItemModelRun", "Export", "ExportFormat", "ExportSnapshot", diff --git a/src/label_studio_sdk/types/access_token_response.py b/src/label_studio_sdk/types/access_token_response.py index 3c55d97da..57df75713 100644 --- a/src/label_studio_sdk/types/access_token_response.py +++ b/src/label_studio_sdk/types/access_token_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class AccessTokenResponse(UniversalBaseModel): access: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/annotation.py b/src/label_studio_sdk/types/annotation.py index 9463e39b2..e27a951a7 100644 --- a/src/label_studio_sdk/types/annotation.py +++ b/src/label_studio_sdk/types/annotation.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotation_completed_by import AnnotationCompletedBy -import datetime as dt from .annotation_last_action import AnnotationLastAction -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Annotation(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/annotation_completed_by.py b/src/label_studio_sdk/types/annotation_completed_by.py index b9e7caf52..f3f4a635e 100644 --- a/src/label_studio_sdk/types/annotation_completed_by.py +++ b/src/label_studio_sdk/types/annotation_completed_by.py @@ -1,6 +1,7 @@ # This file was auto-generated by Fern from our API Definition. import typing + from .user_simple import UserSimple AnnotationCompletedBy = typing.Union[UserSimple, int] diff --git a/src/label_studio_sdk/types/annotation_filter_options.py b/src/label_studio_sdk/types/annotation_filter_options.py index 3f00e64ba..3e3f86ed4 100644 --- a/src/label_studio_sdk/types/annotation_filter_options.py +++ b/src/label_studio_sdk/types/annotation_filter_options.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class AnnotationFilterOptions(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/annotations_dm_field.py b/src/label_studio_sdk/types/annotations_dm_field.py index 114de210d..0aa2976f4 100644 --- a/src/label_studio_sdk/types/annotations_dm_field.py +++ b/src/label_studio_sdk/types/annotations_dm_field.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotations_dm_field_last_action import AnnotationsDmFieldLastAction -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AnnotationsDmField(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/api_token_response.py b/src/label_studio_sdk/types/api_token_response.py index 72d4fddd3..5f7ab74bd 100644 --- a/src/label_studio_sdk/types/api_token_response.py +++ b/src/label_studio_sdk/types/api_token_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class ApiTokenResponse(UniversalBaseModel): token: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/azure_blob_export_storage.py b/src/label_studio_sdk/types/azure_blob_export_storage.py index 83394ea4d..20500cf84 100644 --- a/src/label_studio_sdk/types/azure_blob_export_storage.py +++ b/src/label_studio_sdk/types/azure_blob_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .azure_blob_export_storage_status import AzureBlobExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AzureBlobExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/azure_blob_export_storage_status.py b/src/label_studio_sdk/types/azure_blob_export_storage_status.py index 0b25961af..d63998f7b 100644 --- a/src/label_studio_sdk/types/azure_blob_export_storage_status.py +++ b/src/label_studio_sdk/types/azure_blob_export_storage_status.py @@ -3,6 +3,5 @@ import typing AzureBlobExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/azure_blob_import_storage.py b/src/label_studio_sdk/types/azure_blob_import_storage.py index 3de9b873b..c5f3648b0 100644 --- a/src/label_studio_sdk/types/azure_blob_import_storage.py +++ b/src/label_studio_sdk/types/azure_blob_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API 
Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .azure_blob_import_storage_status import AzureBlobImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class AzureBlobImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/azure_blob_import_storage_status.py b/src/label_studio_sdk/types/azure_blob_import_storage_status.py index 05c38a48b..bb2b31b26 100644 --- a/src/label_studio_sdk/types/azure_blob_import_storage_status.py +++ b/src/label_studio_sdk/types/azure_blob_import_storage_status.py @@ -3,6 +3,5 @@ import typing AzureBlobImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/base_task.py b/src/label_studio_sdk/types/base_task.py index 3e8251862..a06228dd3 100644 --- a/src/label_studio_sdk/types/base_task.py +++ b/src/label_studio_sdk/types/base_task.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt -from .base_task_updated_by import BaseTaskUpdatedBy +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .base_task_file_upload import BaseTaskFileUpload -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .base_task_updated_by import BaseTaskUpdatedBy class BaseTask(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/base_task_updated_by.py b/src/label_studio_sdk/types/base_task_updated_by.py index 1849b31d9..4a9de7471 100644 --- a/src/label_studio_sdk/types/base_task_updated_by.py +++ b/src/label_studio_sdk/types/base_task_updated_by.py @@ -3,6 +3,5 @@ import typing BaseTaskUpdatedBy = typing.Union[ - typing.Optional[int], - typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]], + typing.Optional[int], typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] ] diff --git a/src/label_studio_sdk/types/base_user.py b/src/label_studio_sdk/types/base_user.py index 6edb03619..ef14b7fa2 100644 --- a/src/label_studio_sdk/types/base_user.py +++ b/src/label_studio_sdk/types/base_user.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class BaseUser(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/comment.py b/src/label_studio_sdk/types/comment.py index 5f48c133b..f52785a71 100644 --- a/src/label_studio_sdk/types/comment.py +++ b/src/label_studio_sdk/types/comment.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -from .comment_created_by import CommentCreatedBy import datetime as dt import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .comment_created_by import CommentCreatedBy class Comment(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/converted_format.py b/src/label_studio_sdk/types/converted_format.py index bc0bf56aa..70b6583de 100644 --- a/src/label_studio_sdk/types/converted_format.py +++ b/src/label_studio_sdk/types/converted_format.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .converted_format_status import ConvertedFormatStatus + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .converted_format_status import ConvertedFormatStatus class ConvertedFormat(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer.py b/src/label_studio_sdk/types/data_manager_task_serializer.py index a129aa5c6..9e454d44c 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer.py @@ -1,20 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .data_manager_task_serializer_predictions_item import ( - DataManagerTaskSerializerPredictionsItem, -) + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotations_dm_field import AnnotationsDmField -from .data_manager_task_serializer_drafts_item import ( - DataManagerTaskSerializerDraftsItem, -) -from .data_manager_task_serializer_annotators_item import ( - DataManagerTaskSerializerAnnotatorsItem, -) -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .data_manager_task_serializer_annotators_item import DataManagerTaskSerializerAnnotatorsItem +from .data_manager_task_serializer_comment_authors_item import DataManagerTaskSerializerCommentAuthorsItem +from .data_manager_task_serializer_drafts_item import DataManagerTaskSerializerDraftsItem +from .data_manager_task_serializer_predictions_item import DataManagerTaskSerializerPredictionsItem class DataManagerTaskSerializer(UniversalBaseModel): @@ -106,7 +101,9 @@ class DataManagerTaskSerializer(UniversalBaseModel): Project ID for this task """ - comment_authors: typing.Optional[typing.List[int]] = pydantic.Field(default=None) + comment_authors: typing.Optional[typing.List[DataManagerTaskSerializerCommentAuthorsItem]] = pydantic.Field( + default=None + ) """ Users who wrote comments """ diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_comment_authors_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_comment_authors_item.py new file mode 100644 index 000000000..e9932b27c --- /dev/null +++ b/src/label_studio_sdk/types/data_manager_task_serializer_comment_authors_item.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +DataManagerTaskSerializerCommentAuthorsItem = typing.Union[int, typing.Dict[str, typing.Optional[typing.Any]]] diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py index 792c89c46..8d334b7b4 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_drafts_item.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class DataManagerTaskSerializerDraftsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py index 8b01227e4..d9ef29254 100644 --- a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py +++ b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item.py @@ -1,10 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .data_manager_task_serializer_predictions_item_model_run import DataManagerTaskSerializerPredictionsItemModelRun class DataManagerTaskSerializerPredictionsItem(UniversalBaseModel): @@ -12,7 +13,7 @@ class DataManagerTaskSerializerPredictionsItem(UniversalBaseModel): score: typing.Optional[float] = None model_version: typing.Optional[str] = None model: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None - model_run: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None + model_run: typing.Optional[DataManagerTaskSerializerPredictionsItemModelRun] = None task: typing.Optional[int] = None project: typing.Optional[float] = None created_at: typing.Optional[dt.datetime] = None diff --git a/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item_model_run.py b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item_model_run.py new file mode 100644 index 000000000..7fc3dd4c2 --- /dev/null +++ b/src/label_studio_sdk/types/data_manager_task_serializer_predictions_item_model_run.py @@ -0,0 +1,5 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +DataManagerTaskSerializerPredictionsItemModelRun = typing.Union[typing.Dict[str, typing.Optional[typing.Any]], int] diff --git a/src/label_studio_sdk/types/export.py b/src/label_studio_sdk/types/export.py index 1d7f45038..c1c80f164 100644 --- a/src/label_studio_sdk/types/export.py +++ b/src/label_studio_sdk/types/export.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing -from .user_simple import UserSimple import datetime as dt +import typing + import pydantic -from .export_status import ExportStatus +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .converted_format import ConvertedFormat -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .export_status import ExportStatus +from .user_simple import UserSimple class Export(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/export_snapshot.py b/src/label_studio_sdk/types/export_snapshot.py index 4f23e9996..afc7920e6 100644 --- a/src/label_studio_sdk/types/export_snapshot.py +++ b/src/label_studio_sdk/types/export_snapshot.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing -from .user_simple import UserSimple import datetime as dt +import typing + import pydantic -from .export_snapshot_status import ExportSnapshotStatus -from .converted_format import ConvertedFormat -from .task_filter_options import TaskFilterOptions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .annotation_filter_options import AnnotationFilterOptions +from .converted_format import ConvertedFormat +from .export_snapshot_status import ExportSnapshotStatus from .serialization_options import SerializationOptions -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .task_filter_options import TaskFilterOptions +from .user_simple import UserSimple class ExportSnapshot(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/file_upload.py b/src/label_studio_sdk/types/file_upload.py index 8fcd31f62..c4320edaf 100644 --- a/src/label_studio_sdk/types/file_upload.py +++ b/src/label_studio_sdk/types/file_upload.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class FileUpload(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/filter.py b/src/label_studio_sdk/types/filter.py index c5e37fa4d..c301db612 100644 --- a/src/label_studio_sdk/types/filter.py +++ b/src/label_studio_sdk/types/filter.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class Filter(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/filter_group.py b/src/label_studio_sdk/types/filter_group.py index 626b8a439..7dc87b712 100644 --- a/src/label_studio_sdk/types/filter_group.py +++ b/src/label_studio_sdk/types/filter_group.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .filter import Filter + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .filter import Filter class FilterGroup(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_export_storage.py b/src/label_studio_sdk/types/gcs_export_storage.py index 399102266..df5cd7189 100644 --- a/src/label_studio_sdk/types/gcs_export_storage.py +++ b/src/label_studio_sdk/types/gcs_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .gcs_export_storage_status import GcsExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class GcsExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_export_storage_status.py b/src/label_studio_sdk/types/gcs_export_storage_status.py index 64534c344..9284fa5b0 100644 --- a/src/label_studio_sdk/types/gcs_export_storage_status.py +++ b/src/label_studio_sdk/types/gcs_export_storage_status.py @@ -3,6 +3,5 @@ import typing GcsExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/gcs_import_storage.py b/src/label_studio_sdk/types/gcs_import_storage.py index ee406e985..269b48fd0 100644 --- a/src/label_studio_sdk/types/gcs_import_storage.py +++ b/src/label_studio_sdk/types/gcs_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .gcs_import_storage_status import GcsImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class GcsImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/gcs_import_storage_status.py b/src/label_studio_sdk/types/gcs_import_storage_status.py index 44d6fa825..0c503c5e3 100644 --- a/src/label_studio_sdk/types/gcs_import_storage_status.py +++ b/src/label_studio_sdk/types/gcs_import_storage_status.py @@ -3,6 +3,5 @@ import typing GcsImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/inference_run.py b/src/label_studio_sdk/types/inference_run.py index b6837d9cc..6a1e5d6f5 100644 --- a/src/label_studio_sdk/types/inference_run.py +++ b/src/label_studio_sdk/types/inference_run.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .inference_run_organization import InferenceRunOrganization + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .inference_run_created_by import InferenceRunCreatedBy +from .inference_run_organization import InferenceRunOrganization from .inference_run_project_subset import InferenceRunProjectSubset from .inference_run_status import InferenceRunStatus -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 -import pydantic class InferenceRun(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/inference_run_cost_estimate.py b/src/label_studio_sdk/types/inference_run_cost_estimate.py index 103f975c3..531292f47 100644 --- a/src/label_studio_sdk/types/inference_run_cost_estimate.py +++ b/src/label_studio_sdk/types/inference_run_cost_estimate.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class InferenceRunCostEstimate(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/inference_run_status.py b/src/label_studio_sdk/types/inference_run_status.py index b061f5320..b832b23ad 100644 --- a/src/label_studio_sdk/types/inference_run_status.py +++ b/src/label_studio_sdk/types/inference_run_status.py @@ -3,6 +3,5 @@ import typing InferenceRunStatus = typing.Union[ - typing.Literal["Pending", "InProgress", "Completed", "Failed", "Canceled"], - typing.Any, + typing.Literal["Pending", "InProgress", "Completed", "Failed", "Canceled"], typing.Any ] diff --git a/src/label_studio_sdk/types/jwt_settings_response.py b/src/label_studio_sdk/types/jwt_settings_response.py index a2c1fb95a..99e431d8f 100644 --- a/src/label_studio_sdk/types/jwt_settings_response.py +++ b/src/label_studio_sdk/types/jwt_settings_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class JwtSettingsResponse(UniversalBaseModel): api_tokens_enabled: bool = pydantic.Field() diff --git a/src/label_studio_sdk/types/key_indicator_value.py b/src/label_studio_sdk/types/key_indicator_value.py index 291358e18..116d4d2f6 100644 --- a/src/label_studio_sdk/types/key_indicator_value.py +++ b/src/label_studio_sdk/types/key_indicator_value.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class KeyIndicatorValue(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators.py b/src/label_studio_sdk/types/key_indicators.py index 23e5b71de..d03c3cd18 100644 --- a/src/label_studio_sdk/types/key_indicators.py +++ b/src/label_studio_sdk/types/key_indicators.py @@ -1,6 +1,7 @@ # This file was auto-generated by Fern from our API Definition. import typing + from .key_indicators_item import KeyIndicatorsItem KeyIndicators = typing.List[KeyIndicatorsItem] diff --git a/src/label_studio_sdk/types/key_indicators_item.py b/src/label_studio_sdk/types/key_indicators_item.py index eee9df6e8..bc5dc63a6 100644 --- a/src/label_studio_sdk/types/key_indicators_item.py +++ b/src/label_studio_sdk/types/key_indicators_item.py @@ -1,13 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic import typing -from .key_indicators_item_additional_kpis_item import ( - KeyIndicatorsItemAdditionalKpisItem, -) + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .key_indicators_item_additional_kpis_item import KeyIndicatorsItemAdditionalKpisItem from .key_indicators_item_extra_kpis_item import KeyIndicatorsItemExtraKpisItem -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class KeyIndicatorsItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py index a0e1b06ca..ea89a9020 100644 --- a/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_additional_kpis_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class KeyIndicatorsItemAdditionalKpisItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py index 9e539bc1d..fe5f23248 100644 --- a/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py +++ b/src/label_studio_sdk/types/key_indicators_item_extra_kpis_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class KeyIndicatorsItemExtraKpisItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_export_storage.py b/src/label_studio_sdk/types/local_files_export_storage.py index fffaaaa84..596487c0d 100644 --- a/src/label_studio_sdk/types/local_files_export_storage.py +++ b/src/label_studio_sdk/types/local_files_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .local_files_export_storage_status import LocalFilesExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class LocalFilesExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_export_storage_status.py b/src/label_studio_sdk/types/local_files_export_storage_status.py index da4b79bb8..440144230 100644 --- a/src/label_studio_sdk/types/local_files_export_storage_status.py +++ b/src/label_studio_sdk/types/local_files_export_storage_status.py @@ -3,6 +3,5 @@ import typing LocalFilesExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/local_files_import_storage.py b/src/label_studio_sdk/types/local_files_import_storage.py index 57240a844..5a0d70f93 100644 --- a/src/label_studio_sdk/types/local_files_import_storage.py +++ b/src/label_studio_sdk/types/local_files_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .local_files_import_storage_status import LocalFilesImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class LocalFilesImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/local_files_import_storage_status.py b/src/label_studio_sdk/types/local_files_import_storage_status.py index d05328925..562ec5011 100644 --- a/src/label_studio_sdk/types/local_files_import_storage_status.py +++ b/src/label_studio_sdk/types/local_files_import_storage_status.py @@ -3,6 +3,5 @@ import typing LocalFilesImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/ml_backend.py b/src/label_studio_sdk/types/ml_backend.py index 21fd41e90..067326c1e 100644 --- a/src/label_studio_sdk/types/ml_backend.py +++ b/src/label_studio_sdk/types/ml_backend.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .ml_backend_state import MlBackendState + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ml_backend_auth_method import MlBackendAuthMethod -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .ml_backend_state import MlBackendState class MlBackend(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/model_provider_connection.py b/src/label_studio_sdk/types/model_provider_connection.py index 4f617467c..2d624ef77 100644 --- a/src/label_studio_sdk/types/model_provider_connection.py +++ b/src/label_studio_sdk/types/model_provider_connection.py @@ -1,17 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -from .model_provider_connection_provider import ModelProviderConnectionProvider -import typing -from .model_provider_connection_scope import ModelProviderConnectionScope -from .model_provider_connection_organization import ModelProviderConnectionOrganization -from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy import datetime as dt +import typing + import pydantic -from .model_provider_connection_budget_reset_period import ( - ModelProviderConnectionBudgetResetPeriod, -) -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .model_provider_connection_budget_reset_period import ModelProviderConnectionBudgetResetPeriod +from .model_provider_connection_created_by import ModelProviderConnectionCreatedBy +from .model_provider_connection_organization import ModelProviderConnectionOrganization +from .model_provider_connection_provider import ModelProviderConnectionProvider +from .model_provider_connection_scope import ModelProviderConnectionScope class ModelProviderConnection(UniversalBaseModel): diff 
--git a/src/label_studio_sdk/types/model_provider_connection_provider.py b/src/label_studio_sdk/types/model_provider_connection_provider.py index ce1b21a29..fa58489fc 100644 --- a/src/label_studio_sdk/types/model_provider_connection_provider.py +++ b/src/label_studio_sdk/types/model_provider_connection_provider.py @@ -3,14 +3,5 @@ import typing ModelProviderConnectionProvider = typing.Union[ - typing.Literal[ - "OpenAI", - "AzureOpenAI", - "AzureAIFoundry", - "VertexAI", - "Gemini", - "Anthropic", - "Custom", - ], - typing.Any, + typing.Literal["OpenAI", "AzureOpenAI", "AzureAIFoundry", "VertexAI", "Gemini", "Anthropic", "Custom"], typing.Any ] diff --git a/src/label_studio_sdk/types/pause.py b/src/label_studio_sdk/types/pause.py index 7c9af1c99..74f5f7829 100644 --- a/src/label_studio_sdk/types/pause.py +++ b/src/label_studio_sdk/types/pause.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .pause_paused_by import PausePausedBy + import pydantic -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .pause_paused_by import PausePausedBy class Pause(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prediction.py b/src/label_studio_sdk/types/prediction.py index efb00f16b..797207464 100644 --- a/src/label_studio_sdk/types/prediction.py +++ b/src/label_studio_sdk/types/prediction.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class Prediction(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project.py b/src/label_studio_sdk/types/project.py index 0c78fddeb..56200362e 100644 --- a/src/label_studio_sdk/types/project.py +++ b/src/label_studio_sdk/types/project.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -from .prompt import Prompt -from .user_simple import UserSimple -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .project_sampling import ProjectSampling from .project_skip_queue import ProjectSkipQueue -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .prompt import Prompt +from .user_simple import UserSimple class Project(UniversalBaseModel): @@ -154,7 +154,7 @@ class Project(UniversalBaseModel): control_weights: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) """ - Dict of weights for each control tag in metric calculation. Each control tag (e.g. label or choice) will have it's own key in control weight dict with weight for each label and overall weight.For example, if bounding box annotation with control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice more important than Airplaine, then you have to need the specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplaine': 0.5}, 'overall': 0.33} + Dict of weights for each control tag in metric calculation. Each control tag (e.g. 
label or choice) will have its own key in control weight dict with weight for each label and overall weight. For example, if a bounding box annotation with a control tag named my_bbox should be included with 0.33 weight in agreement calculation, and the first label Car should be twice as important as Airplane, then you need to specify: {'my_bbox': {'type': 'RectangleLabels', 'labels': {'Car': 1.0, 'Airplane': 0.5}, 'overall': 0.33}} """ parsed_label_config: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None) diff --git a/src/label_studio_sdk/types/project_import.py b/src/label_studio_sdk/types/project_import.py index 331fd485a..9874942a7 100644 --- a/src/label_studio_sdk/types/project_import.py +++ b/src/label_studio_sdk/types/project_import.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing -from .project_import_status import ProjectImportStatus import datetime as dt +import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .project_import_status import ProjectImportStatus class ProjectImport(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/project_label_config.py b/src/label_studio_sdk/types/project_label_config.py index 443fbb86e..5b21c6e3b 100644 --- a/src/label_studio_sdk/types/project_label_config.py +++ b/src/label_studio_sdk/types/project_label_config.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class ProjectLabelConfig(UniversalBaseModel): label_config: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/project_sampling.py b/src/label_studio_sdk/types/project_sampling.py index 27a186483..0c78d6e17 100644 --- a/src/label_studio_sdk/types/project_sampling.py +++ b/src/label_studio_sdk/types/project_sampling.py @@ -3,6 +3,5 @@ import typing ProjectSampling = typing.Union[ - typing.Literal["Sequential sampling", "Uniform sampling", "Uncertainty sampling"], - typing.Any, + typing.Literal["Sequential sampling", "Uniform sampling", "Uncertainty sampling"], typing.Any ] diff --git a/src/label_studio_sdk/types/prompt.py b/src/label_studio_sdk/types/prompt.py index 81c04e8c3..ad2173741 100644 --- a/src/label_studio_sdk/types/prompt.py +++ b/src/label_studio_sdk/types/prompt.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import pydantic +import datetime as dt import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .prompt_associated_projects_item import PromptAssociatedProjectsItem from .prompt_created_by import PromptCreatedBy -import datetime as dt from .prompt_organization import PromptOrganization -from .prompt_associated_projects_item import PromptAssociatedProjectsItem -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Prompt(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_associated_projects_item.py b/src/label_studio_sdk/types/prompt_associated_projects_item.py index 05ad2f37c..3b1bd38e7 100644 --- a/src/label_studio_sdk/types/prompt_associated_projects_item.py +++ b/src/label_studio_sdk/types/prompt_associated_projects_item.py @@ -1,6 +1,7 @@ # This file was auto-generated by Fern from our API Definition. import typing + from .prompt_associated_projects_item_id import PromptAssociatedProjectsItemId PromptAssociatedProjectsItem = typing.Union[int, PromptAssociatedProjectsItemId] diff --git a/src/label_studio_sdk/types/prompt_associated_projects_item_id.py b/src/label_studio_sdk/types/prompt_associated_projects_item_id.py index 01c5c53c6..aad441f74 100644 --- a/src/label_studio_sdk/types/prompt_associated_projects_item_id.py +++ b/src/label_studio_sdk/types/prompt_associated_projects_item_id.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class PromptAssociatedProjectsItemId(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_version.py b/src/label_studio_sdk/types/prompt_version.py index 38f317b13..0058662f4 100644 --- a/src/label_studio_sdk/types/prompt_version.py +++ b/src/label_studio_sdk/types/prompt_version.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing -from .prompt_version_provider import PromptVersionProvider + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .prompt_version_created_by import PromptVersionCreatedBy -import datetime as dt from .prompt_version_organization import PromptVersionOrganization -from ..core.pydantic_utilities import IS_PYDANTIC_V2 -import pydantic +from .prompt_version_provider import PromptVersionProvider class PromptVersion(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/prompt_version_provider.py b/src/label_studio_sdk/types/prompt_version_provider.py index efdc2d37c..c46d1a164 100644 --- a/src/label_studio_sdk/types/prompt_version_provider.py +++ b/src/label_studio_sdk/types/prompt_version_provider.py @@ -3,14 +3,5 @@ import typing PromptVersionProvider = typing.Union[ - typing.Literal[ - "OpenAI", - "AzureOpenAI", - "AzureAIFoundry", - "VertexAI", - "Gemini", - "Anthropic", - "Custom", - ], - typing.Any, + typing.Literal["OpenAI", "AzureOpenAI", "AzureAIFoundry", "VertexAI", "Gemini", "Anthropic", "Custom"], typing.Any ] diff --git a/src/label_studio_sdk/types/redis_export_storage.py b/src/label_studio_sdk/types/redis_export_storage.py index 49d816584..e99a9d5c5 100644 --- a/src/label_studio_sdk/types/redis_export_storage.py +++ 
b/src/label_studio_sdk/types/redis_export_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .redis_export_storage_status import RedisExportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class RedisExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/redis_export_storage_status.py b/src/label_studio_sdk/types/redis_export_storage_status.py index 0f04ce717..1ef9709ff 100644 --- a/src/label_studio_sdk/types/redis_export_storage_status.py +++ b/src/label_studio_sdk/types/redis_export_storage_status.py @@ -3,6 +3,5 @@ import typing RedisExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/redis_import_storage.py b/src/label_studio_sdk/types/redis_import_storage.py index 0790e279c..a80604fee 100644 --- a/src/label_studio_sdk/types/redis_import_storage.py +++ b/src/label_studio_sdk/types/redis_import_storage.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .redis_import_storage_status import RedisImportStorageStatus -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class RedisImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/redis_import_storage_status.py b/src/label_studio_sdk/types/redis_import_storage_status.py index 3d7d0cece..c8db5e91c 100644 --- a/src/label_studio_sdk/types/redis_import_storage_status.py +++ b/src/label_studio_sdk/types/redis_import_storage_status.py @@ -3,6 +3,5 @@ import typing RedisImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/refined_prompt_response.py b/src/label_studio_sdk/types/refined_prompt_response.py index 7b77cb67f..728f5ff81 100644 --- a/src/label_studio_sdk/types/refined_prompt_response.py +++ b/src/label_studio_sdk/types/refined_prompt_response.py @@ -1,13 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from .refined_prompt_response_refinement_status import ( - RefinedPromptResponseRefinementStatus, -) +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .prompt_version import PromptVersion -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .refined_prompt_response_refinement_status import RefinedPromptResponseRefinementStatus class RefinedPromptResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/rotate_token_response.py b/src/label_studio_sdk/types/rotate_token_response.py index 81e404eb6..b71032dae 100644 --- a/src/label_studio_sdk/types/rotate_token_response.py +++ b/src/label_studio_sdk/types/rotate_token_response.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + class RotateTokenResponse(UniversalBaseModel): refresh: str = pydantic.Field() diff --git a/src/label_studio_sdk/types/s3export_storage.py b/src/label_studio_sdk/types/s3export_storage.py index ed4e36389..a658136dd 100644 --- a/src/label_studio_sdk/types/s3export_storage.py +++ b/src/label_studio_sdk/types/s3export_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from .s3export_storage_status import S3ExportStorageStatus import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .s3export_storage_status import S3ExportStorageStatus class S3ExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3export_storage_status.py b/src/label_studio_sdk/types/s3export_storage_status.py index c0bddb13a..b4427e391 100644 --- a/src/label_studio_sdk/types/s3export_storage_status.py +++ b/src/label_studio_sdk/types/s3export_storage_status.py @@ -3,6 +3,5 @@ import typing S3ExportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/s3import_storage.py b/src/label_studio_sdk/types/s3import_storage.py index dc713fc85..87faf912f 100644 --- a/src/label_studio_sdk/types/s3import_storage.py +++ b/src/label_studio_sdk/types/s3import_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from .s3import_storage_status import S3ImportStorageStatus import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .s3import_storage_status import S3ImportStorageStatus class S3ImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3import_storage_status.py b/src/label_studio_sdk/types/s3import_storage_status.py index 5a88667a0..e77526af8 100644 --- a/src/label_studio_sdk/types/s3import_storage_status.py +++ b/src/label_studio_sdk/types/s3import_storage_status.py @@ -3,6 +3,5 @@ import typing S3ImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/s3s_export_storage.py b/src/label_studio_sdk/types/s3s_export_storage.py index d7e1d616f..8e63ac2a3 100644 --- a/src/label_studio_sdk/types/s3s_export_storage.py +++ b/src/label_studio_sdk/types/s3s_export_storage.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class S3SExportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3s_import_storage.py b/src/label_studio_sdk/types/s3s_import_storage.py index 1362a231c..610b726a8 100644 --- a/src/label_studio_sdk/types/s3s_import_storage.py +++ b/src/label_studio_sdk/types/s3s_import_storage.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel -import typing import datetime as dt +import typing + import pydantic -from .s3s_import_storage_status import S3SImportStorageStatus import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .s3s_import_storage_status import S3SImportStorageStatus class S3SImportStorage(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/s3s_import_storage_status.py b/src/label_studio_sdk/types/s3s_import_storage_status.py index a8b9689a6..f3765ab47 100644 --- a/src/label_studio_sdk/types/s3s_import_storage_status.py +++ b/src/label_studio_sdk/types/s3s_import_storage_status.py @@ -3,6 +3,5 @@ import typing S3SImportStorageStatus = typing.Union[ - typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], - typing.Any, + typing.Literal["initialized", "queued", "in_progress", "failed", "completed"], typing.Any ] diff --git a/src/label_studio_sdk/types/serialization_option.py b/src/label_studio_sdk/types/serialization_option.py index 347950cba..961b0809a 100644 --- a/src/label_studio_sdk/types/serialization_option.py +++ 
b/src/label_studio_sdk/types/serialization_option.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class SerializationOption(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/serialization_options.py b/src/label_studio_sdk/types/serialization_options.py index 8d6f46bb0..08d9f9655 100644 --- a/src/label_studio_sdk/types/serialization_options.py +++ b/src/label_studio_sdk/types/serialization_options.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from .serialization_option import SerializationOption + import pydantic import typing_extensions +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.serialization import FieldMetadata -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from .serialization_option import SerializationOption class SerializationOptions(UniversalBaseModel): @@ -18,8 +18,7 @@ class SerializationOptions(UniversalBaseModel): """ annotations_completed_by: typing_extensions.Annotated[ - typing.Optional[SerializationOption], - FieldMetadata(alias="annotations__completed_by"), + typing.Optional[SerializationOption], FieldMetadata(alias="annotations__completed_by") ] = None interpolate_key_frames: typing.Optional[bool] = pydantic.Field(default=None) """ diff --git a/src/label_studio_sdk/types/task.py b/src/label_studio_sdk/types/task.py index a0965e4fc..7e3d143f2 100644 --- a/src/label_studio_sdk/types/task.py +++ b/src/label_studio_sdk/types/task.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .task_annotators_item import TaskAnnotatorsItem -import datetime as dt from .task_comment_authors_item import TaskCommentAuthorsItem -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Task(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/task_filter_options.py b/src/label_studio_sdk/types/task_filter_options.py index 04b817727..023cce091 100644 --- a/src/label_studio_sdk/types/task_filter_options.py +++ b/src/label_studio_sdk/types/task_filter_options.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class TaskFilterOptions(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/user_simple.py b/src/label_studio_sdk/types/user_simple.py index ff7b3e16c..0d258fbcd 100644 --- a/src/label_studio_sdk/types/user_simple.py +++ b/src/label_studio_sdk/types/user_simple.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel import typing -from ..core.pydantic_utilities import IS_PYDANTIC_V2 + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class UserSimple(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/view.py b/src/label_studio_sdk/types/view.py index 1713377c4..ea1104e21 100644 --- a/src/label_studio_sdk/types/view.py +++ b/src/label_studio_sdk/types/view.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel import typing -from .filter_group import FilterGroup + import pydantic -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .filter_group import FilterGroup class View(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/webhook.py b/src/label_studio_sdk/types/webhook.py index 569486be6..ad0fa4264 100644 --- a/src/label_studio_sdk/types/webhook.py +++ b/src/label_studio_sdk/types/webhook.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .webhook_actions_item import WebhookActionsItem -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 class Webhook(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/webhook_serializer_for_update.py b/src/label_studio_sdk/types/webhook_serializer_for_update.py index 832e01de7..782886d5f 100644 --- a/src/label_studio_sdk/types/webhook_serializer_for_update.py +++ b/src/label_studio_sdk/types/webhook_serializer_for_update.py @@ -1,13 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -from .webhook_serializer_for_update_actions_item import ( - WebhookSerializerForUpdateActionsItem, -) -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem class WebhookSerializerForUpdate(UniversalBaseModel): diff --git a/src/label_studio_sdk/types/workspace.py b/src/label_studio_sdk/types/workspace.py index e0adc186c..bda30027f 100644 --- a/src/label_studio_sdk/types/workspace.py +++ b/src/label_studio_sdk/types/workspace.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.pydantic_utilities import UniversalBaseModel +import datetime as dt import typing + import pydantic -import datetime as dt -from ..core.pydantic_utilities import IS_PYDANTIC_V2 +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class Workspace(UniversalBaseModel): diff --git a/src/label_studio_sdk/users/__init__.py b/src/label_studio_sdk/users/__init__.py index 92fd561f2..3c70756d1 100644 --- a/src/label_studio_sdk/users/__init__.py +++ b/src/label_studio_sdk/users/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import UsersGetTokenResponse, UsersResetTokenResponse __all__ = ["UsersGetTokenResponse", "UsersResetTokenResponse"] diff --git a/src/label_studio_sdk/users/client.py b/src/label_studio_sdk/users/client.py index 62618163a..a6733ce9a 100644 --- a/src/label_studio_sdk/users/client.py +++ b/src/label_studio_sdk/users/client.py @@ -1,16 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions -from .types.users_reset_token_response import UsersResetTokenResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from .types.users_get_token_response import UsersGetTokenResponse from ..types.base_user import BaseUser -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper +from .raw_client import AsyncRawUsersClient, RawUsersClient +from .types.users_get_token_response import UsersGetTokenResponse +from .types.users_reset_token_response import UsersResetTokenResponse # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -18,7 +15,18 @@ class UsersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawUsersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawUsersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawUsersClient + """ + return self._raw_client def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersResetTokenResponse: """ @@ -43,24 +51,8 @@ def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None ) client.users.reset_token() """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersResetTokenResponse, - parse_obj_as( - type_=UsersResetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.reset_token(request_options=request_options) + return _response.data def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersGetTokenResponse: """ @@ -85,24 +77,8 @@ def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) ) client.users.get_token() """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersGetTokenResponse, - parse_obj_as( - type_=UsersGetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get_token(request_options=request_options) + return _response.data def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -127,24 +103,8 @@ def whoami(self, *, 
request_options: typing.Optional[RequestOptions] = None) -> ) client.users.whoami() """ - _response = self._client_wrapper.httpx_client.request( - "api/current-user/whoami", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.whoami(request_options=request_options) + return _response.data def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ @@ -170,24 +130,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.users.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/users/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[BaseUser], - parse_obj_as( - type_=typing.List[BaseUser], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -253,39 +197,19 @@ def create( ) client.users.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/users/", - method="POST", - json={ - "id": id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": 
"application/json", - }, + _response = self._raw_client.create( + id=id, + first_name=first_name, + last_name=last_name, + username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -317,24 +241,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -368,18 +276,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - 
except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -453,44 +351,36 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "id": users_update_request_id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + users_update_request_id=users_update_request_id, + first_name=first_name, + last_name=last_name, + username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncUsersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawUsersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawUsersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawUsersClient + """ + return self._raw_client async def reset_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersResetTokenResponse: """ @@ -523,24 +413,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/reset-token/", - method="POST", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersResetTokenResponse, - parse_obj_as( - type_=UsersResetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.reset_token(request_options=request_options) + return _response.data async def get_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> UsersGetTokenResponse: """ @@ -573,24 +447,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/current-user/token", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - UsersGetTokenResponse, - parse_obj_as( - type_=UsersGetTokenResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get_token(request_options=request_options) + return _response.data async def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -623,24 +481,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await 
self._client_wrapper.httpx_client.request( - "api/current-user/whoami", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.whoami(request_options=request_options) + return _response.data async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[BaseUser]: """ @@ -674,24 +516,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/users/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[BaseUser], - parse_obj_as( - type_=typing.List[BaseUser], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -765,39 +591,19 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/users/", - method="POST", - json={ - "id": id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + id=id, + first_name=first_name, + last_name=last_name, + 
username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> BaseUser: """ @@ -837,24 +643,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -896,18 +686,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + 
_response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -989,36 +769,17 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/users/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "id": users_update_request_id, - "first_name": first_name, - "last_name": last_name, - "username": username, - "email": email, - "avatar": avatar, - "initials": initials, - "phone": phone, - "allow_newsletters": allow_newsletters, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + users_update_request_id=users_update_request_id, + first_name=first_name, + last_name=last_name, + username=username, + email=email, + avatar=avatar, + initials=initials, + phone=phone, + allow_newsletters=allow_newsletters, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - BaseUser, - parse_obj_as( - type_=BaseUser, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/users/raw_client.py b/src/label_studio_sdk/users/raw_client.py new file mode 100644 index 000000000..323c86805 --- /dev/null +++ b/src/label_studio_sdk/users/raw_client.py @@ -0,0 +1,833 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.base_user import BaseUser +from .types.users_get_token_response import UsersGetTokenResponse +from .types.users_reset_token_response import UsersResetTokenResponse + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawUsersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def reset_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[UsersResetTokenResponse]: + """ + Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[UsersResetTokenResponse] + User token response + """ + _response = self._client_wrapper.httpx_client.request( + "api/current-user/reset-token/", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[UsersGetTokenResponse]: + """ + Get a access token to authenticate to the API as the current user. To find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[UsersGetTokenResponse] + User token response + """ + _response = self._client_wrapper.httpx_client.request( + "api/current-user/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[BaseUser]: + """ + Get information about your user account, such as your username, email, and user ID. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/current-user/whoami", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[BaseUser]]: + """ + + List all users in your Label Studio organization. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[BaseUser]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/users/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseUser]: + """ + + Create a user in Label Studio. 
+ + Parameters + ---------- + id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/users/", + method="POST", + json={ + "id": id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[BaseUser]: + """ + + Get info about a specific Label Studio user. + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific Label Studio user. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + + Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. + + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + users_update_request_id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[BaseUser]: + """ + + Update details for a specific Label Studio user, such as their name or contact information. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + users_update_request_id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[BaseUser] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "id": users_update_request_id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawUsersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def reset_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[UsersResetTokenResponse]: + 
""" + Reset your access token or API key. When reset, any scripts or automations you have in place will need to be updated with the new key. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[UsersResetTokenResponse] + User token response + """ + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/reset-token/", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersResetTokenResponse, + parse_obj_as( + type_=UsersResetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get_token( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[UsersGetTokenResponse]: + """ + Get a access token to authenticate to the API as the current user. To find this in the Label Studio interface, click **Account & Settings** in the upper right. For more information, see [Access Token](https://labelstud.io/guide/user_account#Access-token). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[UsersGetTokenResponse] + User token response + """ + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/token", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + UsersGetTokenResponse, + parse_obj_as( + type_=UsersGetTokenResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def whoami(self, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[BaseUser]: + """ + Get information about your user account, such as your username, email, and user ID. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/current-user/whoami", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[BaseUser]]: + """ + + List all users in your Label Studio organization. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[BaseUser]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/users/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[BaseUser], + parse_obj_as( + type_=typing.List[BaseUser], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseUser]: + """ + + Create a user in Label Studio. 
+ + Parameters + ---------- + id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/users/", + method="POST", + json={ + "id": id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[BaseUser]: + """ + + Get info about a specific Label Studio user. + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific Label Studio user. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). + + Use caution when deleting a user, as this can cause issues such as breaking the "Annotated by" filter or leaving orphaned records. + + Parameters + ---------- + id : int + User ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + users_update_request_id: typing.Optional[int] = OMIT, + first_name: typing.Optional[str] = OMIT, + last_name: typing.Optional[str] = OMIT, + username: typing.Optional[str] = OMIT, + email: typing.Optional[str] = OMIT, + avatar: typing.Optional[str] = OMIT, + initials: typing.Optional[str] = OMIT, + phone: typing.Optional[str] = OMIT, + allow_newsletters: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[BaseUser]: + """ + + Update details for a specific Label Studio user, such as their name or contact information. + + You will need to provide their user ID. You can find a list of all user IDs using [List users](list). 
+ + Parameters + ---------- + id : int + User ID + + users_update_request_id : typing.Optional[int] + User ID + + first_name : typing.Optional[str] + First name of the user + + last_name : typing.Optional[str] + Last name of the user + + username : typing.Optional[str] + Username of the user + + email : typing.Optional[str] + Email of the user + + avatar : typing.Optional[str] + Avatar URL of the user + + initials : typing.Optional[str] + Initials of the user + + phone : typing.Optional[str] + Phone number of the user + + allow_newsletters : typing.Optional[bool] + Whether the user allows newsletters + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[BaseUser] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/users/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "id": users_update_request_id, + "first_name": first_name, + "last_name": last_name, + "username": username, + "email": email, + "avatar": avatar, + "initials": initials, + "phone": phone, + "allow_newsletters": allow_newsletters, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + BaseUser, + parse_obj_as( + type_=BaseUser, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/users/types/__init__.py b/src/label_studio_sdk/users/types/__init__.py index 69c55eb33..4aa8d7683 100644 --- a/src/label_studio_sdk/users/types/__init__.py +++ b/src/label_studio_sdk/users/types/__init__.py @@ -1,5 +1,7 @@ # This 
file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .users_get_token_response import UsersGetTokenResponse from .users_reset_token_response import UsersResetTokenResponse diff --git a/src/label_studio_sdk/users/types/users_get_token_response.py b/src/label_studio_sdk/users/types/users_get_token_response.py index 815096b07..fe19030d1 100644 --- a/src/label_studio_sdk/users/types/users_get_token_response.py +++ b/src/label_studio_sdk/users/types/users_get_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class UsersGetTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/users/types/users_reset_token_response.py b/src/label_studio_sdk/users/types/users_reset_token_response.py index 0bdf5a8fe..a038930ce 100644 --- a/src/label_studio_sdk/users/types/users_reset_token_response.py +++ b/src/label_studio_sdk/users/types/users_reset_token_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2 +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class UsersResetTokenResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/versions/__init__.py b/src/label_studio_sdk/versions/__init__.py index e3626239c..64e73bb6c 100644 --- a/src/label_studio_sdk/versions/__init__.py +++ b/src/label_studio_sdk/versions/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .types import VersionsGetResponse, VersionsGetResponseEdition __all__ = ["VersionsGetResponse", "VersionsGetResponseEdition"] diff --git a/src/label_studio_sdk/versions/client.py b/src/label_studio_sdk/versions/client.py index f9d9df035..4584d7665 100644 --- a/src/label_studio_sdk/versions/client.py +++ b/src/label_studio_sdk/versions/client.py @@ -1,18 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -from ..core.client_wrapper import SyncClientWrapper import typing + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions +from .raw_client import AsyncRawVersionsClient, RawVersionsClient from .types.versions_get_response import VersionsGetResponse -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.client_wrapper import AsyncClientWrapper class VersionsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawVersionsClient + """ + return self._raw_client def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> VersionsGetResponse: """ @@ -37,29 +46,24 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Ver ) client.versions.get() """ - _response = self._client_wrapper.httpx_client.request( - "api/version", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - VersionsGetResponse, - parse_obj_as( - type_=VersionsGetResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(request_options=request_options) + return _response.data class AsyncVersionsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawVersionsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawVersionsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawVersionsClient + """ + return self._raw_client async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> VersionsGetResponse: """ @@ -92,21 +96,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/version", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - VersionsGetResponse, - parse_obj_as( - type_=VersionsGetResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/versions/raw_client.py b/src/label_studio_sdk/versions/raw_client.py new file mode 100644 index 000000000..4ad3ed634 --- /dev/null +++ b/src/label_studio_sdk/versions/raw_client.py @@ -0,0 +1,91 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from .types.versions_get_response import VersionsGetResponse + + +class RawVersionsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[VersionsGetResponse]: + """ + Get version information about the Label Studio instance. 
+ + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[VersionsGetResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/version", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + VersionsGetResponse, + parse_obj_as( + type_=VersionsGetResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawVersionsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[VersionsGetResponse]: + """ + Get version information about the Label Studio instance. + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[VersionsGetResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/version", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + VersionsGetResponse, + parse_obj_as( + type_=VersionsGetResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/versions/types/__init__.py b/src/label_studio_sdk/versions/types/__init__.py index 852040b2d..a4dddd9ad 100644 --- a/src/label_studio_sdk/versions/types/__init__.py +++ b/src/label_studio_sdk/versions/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .versions_get_response import VersionsGetResponse from .versions_get_response_edition import VersionsGetResponseEdition diff --git a/src/label_studio_sdk/versions/types/versions_get_response.py b/src/label_studio_sdk/versions/types/versions_get_response.py index 1e3178579..3ef46f094 100644 --- a/src/label_studio_sdk/versions/types/versions_get_response.py +++ b/src/label_studio_sdk/versions/types/versions_get_response.py @@ -1,12 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing + import pydantic import typing_extensions +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata from .versions_get_response_edition import VersionsGetResponseEdition -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class VersionsGetResponse(UniversalBaseModel): @@ -16,24 +16,21 @@ class VersionsGetResponse(UniversalBaseModel): """ label_studio_os_package: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], - FieldMetadata(alias="label-studio-os-package"), + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-os-package") ] = pydantic.Field(default=None) """ Information about the Label Studio open source package """ label_studio_os_backend: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], - FieldMetadata(alias="label-studio-os-backend"), + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-os-backend") ] = pydantic.Field(default=None) """ Information about the Label Studio backend """ label_studio_frontend: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], - FieldMetadata(alias="label-studio-frontend"), + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-frontend") ] = pydantic.Field(default=None) """ Information about the Label Studio frontend @@ -45,8 +42,7 @@ class VersionsGetResponse(UniversalBaseModel): """ label_studio_converter: typing_extensions.Annotated[ - typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], - FieldMetadata(alias="label-studio-converter"), + typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="label-studio-converter") ] = pydantic.Field(default=None) """ Information about the Label 
Studio converter component diff --git a/src/label_studio_sdk/views/__init__.py b/src/label_studio_sdk/views/__init__.py index 498ccadb3..9fc67fadc 100644 --- a/src/label_studio_sdk/views/__init__.py +++ b/src/label_studio_sdk/views/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import ( ViewsCreateRequestData, ViewsCreateRequestDataFilters, diff --git a/src/label_studio_sdk/views/client.py b/src/label_studio_sdk/views/client.py index ff5b7e607..662ad680d 100644 --- a/src/label_studio_sdk/views/client.py +++ b/src/label_studio_sdk/views/client.py @@ -1,17 +1,13 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.view import View -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError +from .raw_client import AsyncRawViewsClient, RawViewsClient from .types.views_create_request_data import ViewsCreateRequestData -from ..core.serialization import convert_and_respect_annotation_metadata -from ..core.jsonable_encoder import jsonable_encoder from .types.views_update_request_data import ViewsUpdateRequestData -from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -19,13 +15,21 @@ class ViewsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawViewsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawViewsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawViewsClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[View]: """ @@ -55,27 +59,8 @@ def list( ) client.views.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[View], - parse_obj_as( - type_=typing.List[View], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -115,34 +100,8 @@ def create( ) client.views.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="POST", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsCreateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create(data=data, project=project, request_options=request_options) + return _response.data def 
delete_all(self, *, project: int, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -173,25 +132,8 @@ def delete_all(self, *, project: int, request_options: typing.Optional[RequestOp project=1, ) """ - _response = self._client_wrapper.httpx_client.request( - "api/dm/views/reset/", - method="DELETE", - json={ - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete_all(project=project, request_options=request_options) + return _response.data def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ @@ -222,24 +164,8 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -268,18 +194,8 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - 
method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -323,45 +239,27 @@ def update( id="id", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsUpdateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data class AsyncViewsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawViewsClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawViewsClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawViewsClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[int] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[View]: """ @@ -399,27 +297,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[View], - parse_obj_as( - type_=typing.List[View], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -467,34 +346,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/", - method="POST", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsCreateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.create(data=data, project=project, 
request_options=request_options) + return _response.data async def delete_all(self, *, project: int, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -533,25 +386,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/dm/views/reset/", - method="DELETE", - json={ - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete_all(project=project, request_options=request_options) + return _response.data async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> View: """ @@ -590,24 +426,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -644,18 +464,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="DELETE", - 
request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -707,31 +517,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/dm/views/{jsonable_encoder(id)}/", - method="PATCH", - json={ - "data": convert_and_respect_annotation_metadata( - object_=data, annotation=ViewsUpdateRequestData, direction="write" - ), - "project": project, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - View, - parse_obj_as( - type_=View, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.update(id, data=data, project=project, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/views/raw_client.py b/src/label_studio_sdk/views/raw_client.py new file mode 100644 index 000000000..a778be49f --- /dev/null +++ b/src/label_studio_sdk/views/raw_client.py @@ -0,0 +1,574 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..core.serialization import convert_and_respect_annotation_metadata +from ..types.view import View +from .types.views_create_request_data import ViewsCreateRequestData +from .types.views_update_request_data import ViewsUpdateRequestData + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawViewsClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[View]]: + """ + + List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[View]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + data: typing.Optional[ViewsCreateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[View]: + """ + + Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[ViewsCreateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[View] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="POST", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete_all( + self, *, project: int, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/dm/views/reset/", + method="DELETE", + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[View]: + """ + + Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[View] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + Delete a specific Data Manager view (tab) by ID. You can find the view using [List views](list). 
+ + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: str, + *, + data: typing.Optional[ViewsUpdateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[View]: + """ + + You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + data : typing.Optional[ViewsUpdateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[View] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawViewsClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[int] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[View]]: + """ + + List all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize which tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[View]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[View], + parse_obj_as( + type_=typing.List[View], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + data: typing.Optional[ViewsCreateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[View]: + """ + + Create a new Data Manager view for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks and information appears. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + data : typing.Optional[ViewsCreateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[View] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/", + method="POST", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsCreateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete_all( + self, *, project: int, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete all views for a specific project. A view is a tab in the Data Manager where you can set filters and customize what tasks appear. + + You will need to provide the project ID. You can find this in the URL when viewing the project in Label Studio, or you can use [List all projects](../projects/list). + + Parameters + ---------- + project : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/dm/views/reset/", + method="DELETE", + json={ + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> AsyncHttpResponse[View]: + """ + + Get the details about a specific Data Manager view (tab). You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[View] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + Delete a specific Data Manager view (tab) by ID. 
You can find the view using [List views](list). + + Parameters + ---------- + id : str + View ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: str, + *, + data: typing.Optional[ViewsUpdateRequestData] = OMIT, + project: typing.Optional[int] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[View]: + """ + + You can update a specific Data Manager view (tab) with additional filters and other customizations. You will need to supply the view ID. You can find this using [List views](list). + + Parameters + ---------- + id : str + View ID + + data : typing.Optional[ViewsUpdateRequestData] + Custom view data + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[View] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/dm/views/{jsonable_encoder(id)}/", + method="PATCH", + json={ + "data": convert_and_respect_annotation_metadata( + object_=data, annotation=ViewsUpdateRequestData, direction="write" + ), + "project": project, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + View, + parse_obj_as( + type_=View, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/views/types/__init__.py b/src/label_studio_sdk/views/types/__init__.py index 56164fc06..0404fff5e 100644 --- a/src/label_studio_sdk/views/types/__init__.py +++ b/src/label_studio_sdk/views/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .views_create_request_data import ViewsCreateRequestData from .views_create_request_data_filters import ViewsCreateRequestDataFilters from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction diff --git a/src/label_studio_sdk/views/types/views_create_request_data.py b/src/label_studio_sdk/views/types/views_create_request_data.py index e5a689a01..e7020d25a 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data.py +++ b/src/label_studio_sdk/views/types/views_create_request_data.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from .views_create_request_data_filters import ViewsCreateRequestDataFilters + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_create_request_data_filters import ViewsCreateRequestDataFilters from .views_create_request_data_ordering_item import ViewsCreateRequestDataOrderingItem -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsCreateRequestData(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters.py b/src/label_studio_sdk/views/types/views_create_request_data_filters.py index db954fc98..f0ea6bfa6 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_create_request_data_filters.py @@ -1,15 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .views_create_request_data_filters_conjunction import ( - ViewsCreateRequestDataFiltersConjunction, -) -import pydantic import typing -from .views_create_request_data_filters_items_item import ( - ViewsCreateRequestDataFiltersItemsItem, -) -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_create_request_data_filters_conjunction import ViewsCreateRequestDataFiltersConjunction +from .views_create_request_data_filters_items_item import ViewsCreateRequestDataFiltersItemsItem class ViewsCreateRequestDataFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py index 5d9620fa2..a7280b1b3 100644 --- a/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py +++ 
b/src/label_studio_sdk/views/types/views_create_request_data_filters_items_item.py @@ -1,19 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .views_create_request_data_filters_items_item_filter import ( - ViewsCreateRequestDataFiltersItemsItemFilter, -) -import pydantic -from .views_create_request_data_filters_items_item_operator import ( - ViewsCreateRequestDataFiltersItemsItemOperator, -) -from .views_create_request_data_filters_items_item_value import ( - ViewsCreateRequestDataFiltersItemsItemValue, -) -from ...core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_create_request_data_filters_items_item_filter import ViewsCreateRequestDataFiltersItemsItemFilter +from .views_create_request_data_filters_items_item_operator import ViewsCreateRequestDataFiltersItemsItemOperator +from .views_create_request_data_filters_items_item_value import ViewsCreateRequestDataFiltersItemsItemValue + class ViewsCreateRequestDataFiltersItemsItem(UniversalBaseModel): filter: ViewsCreateRequestDataFiltersItemsItemFilter = pydantic.Field() diff --git a/src/label_studio_sdk/views/types/views_update_request_data.py b/src/label_studio_sdk/views/types/views_update_request_data.py index 1a0ceda3c..0b44365a7 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data.py +++ b/src/label_studio_sdk/views/types/views_update_request_data.py @@ -1,11 +1,11 @@ # This file was auto-generated by Fern from our API Definition. 
-from ...core.pydantic_utilities import UniversalBaseModel import typing -from .views_update_request_data_filters import ViewsUpdateRequestDataFilters + import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_update_request_data_filters import ViewsUpdateRequestDataFilters from .views_update_request_data_ordering_item import ViewsUpdateRequestDataOrderingItem -from ...core.pydantic_utilities import IS_PYDANTIC_V2 class ViewsUpdateRequestData(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters.py b/src/label_studio_sdk/views/types/views_update_request_data_filters.py index aa150835b..f4fc71c12 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters.py +++ b/src/label_studio_sdk/views/types/views_update_request_data_filters.py @@ -1,15 +1,11 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .views_update_request_data_filters_conjunction import ( - ViewsUpdateRequestDataFiltersConjunction, -) -import pydantic import typing -from .views_update_request_data_filters_items_item import ( - ViewsUpdateRequestDataFiltersItemsItem, -) -from ...core.pydantic_utilities import IS_PYDANTIC_V2 + +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_update_request_data_filters_conjunction import ViewsUpdateRequestDataFiltersConjunction +from .views_update_request_data_filters_items_item import ViewsUpdateRequestDataFiltersItemsItem class ViewsUpdateRequestDataFilters(UniversalBaseModel): diff --git a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py index 6936767e2..bbf3aeab9 100644 --- a/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py +++ 
b/src/label_studio_sdk/views/types/views_update_request_data_filters_items_item.py @@ -1,19 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -from ...core.pydantic_utilities import UniversalBaseModel -from .views_update_request_data_filters_items_item_filter import ( - ViewsUpdateRequestDataFiltersItemsItemFilter, -) -import pydantic -from .views_update_request_data_filters_items_item_operator import ( - ViewsUpdateRequestDataFiltersItemsItemOperator, -) -from .views_update_request_data_filters_items_item_value import ( - ViewsUpdateRequestDataFiltersItemsItemValue, -) -from ...core.pydantic_utilities import IS_PYDANTIC_V2 import typing +import pydantic +from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .views_update_request_data_filters_items_item_filter import ViewsUpdateRequestDataFiltersItemsItemFilter +from .views_update_request_data_filters_items_item_operator import ViewsUpdateRequestDataFiltersItemsItemOperator +from .views_update_request_data_filters_items_item_value import ViewsUpdateRequestDataFiltersItemsItemValue + class ViewsUpdateRequestDataFiltersItemsItem(UniversalBaseModel): filter: ViewsUpdateRequestDataFiltersItemsItemFilter = pydantic.Field() diff --git a/src/label_studio_sdk/webhooks/__init__.py b/src/label_studio_sdk/webhooks/__init__.py index 338fdac42..2a2583ff5 100644 --- a/src/label_studio_sdk/webhooks/__init__.py +++ b/src/label_studio_sdk/webhooks/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import WebhooksUpdateRequestActionsItem __all__ = ["WebhooksUpdateRequestActionsItem"] diff --git a/src/label_studio_sdk/webhooks/client.py b/src/label_studio_sdk/webhooks/client.py index 8fa48c0de..c752a19ad 100644 --- a/src/label_studio_sdk/webhooks/client.py +++ b/src/label_studio_sdk/webhooks/client.py @@ -1,21 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
+import datetime as dt import typing -from ..core.client_wrapper import SyncClientWrapper + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.webhook import Webhook -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError from ..types.webhook_actions_item import WebhookActionsItem -import datetime as dt -from ..core.jsonable_encoder import jsonable_encoder -from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem -from ..types.webhook_serializer_for_update_actions_item import ( - WebhookSerializerForUpdateActionsItem, -) from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate -from ..core.client_wrapper import AsyncClientWrapper +from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +from .raw_client import AsyncRawWebhooksClient, RawWebhooksClient +from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -23,13 +18,21 @@ class WebhooksClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawWebhooksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawWebhooksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawWebhooksClient + """ + return self._raw_client def list( - self, - *, - project: typing.Optional[str] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Webhook]: """ @@ -61,27 +64,8 @@ def list( ) client.webhooks.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Webhook], - parse_obj_as( - type_=typing.List[Webhook], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(project=project, request_options=request_options) + return _response.data def create( self, @@ -158,38 +142,21 @@ def create( url="url", ) """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="POST", - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = self._raw_client.create( + url=url, + id=id, + organization=organization, + project=project, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore 
- object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def info( self, @@ -222,21 +189,8 @@ def info( ) client.webhooks.info() """ - _response = self._client_wrapper.httpx_client.request( - "api/webhooks/info/", - method="GET", - params={ - "organization-only": organization_only, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.info(organization_only=organization_only, request_options=request_options) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ @@ -269,24 +223,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -318,18 +256,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = 
self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, request_options=request_options) + return _response.data def update( self, @@ -342,10 +270,7 @@ def update( headers: typing.Optional[str] = None, is_active: typing.Optional[bool] = None, actions: typing.Optional[ - typing.Union[ - WebhooksUpdateRequestActionsItem, - typing.Sequence[WebhooksUpdateRequestActionsItem], - ] + typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] ] = None, id: typing.Optional[int] = OMIT, organization: typing.Optional[int] = OMIT, @@ -439,57 +364,47 @@ def update( webhook_serializer_for_update_url="url", ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id_)}/", - method="PATCH", - params={ - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - }, - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = self._raw_client.update( + id_, + url=url, + webhook_serializer_for_update_url=webhook_serializer_for_update_url, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + id=id, + organization=organization, + project=project, + 
webhook_serializer_for_update_send_payload=webhook_serializer_for_update_send_payload, + webhook_serializer_for_update_send_for_all_actions=webhook_serializer_for_update_send_for_all_actions, + webhook_serializer_for_update_headers=webhook_serializer_for_update_headers, + webhook_serializer_for_update_is_active=webhook_serializer_for_update_is_active, + webhook_serializer_for_update_actions=webhook_serializer_for_update_actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - WebhookSerializerForUpdate, - parse_obj_as( - type_=WebhookSerializerForUpdate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncWebhooksClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawWebhooksClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawWebhooksClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawWebhooksClient + """ + return self._raw_client async def list( - self, - *, - project: typing.Optional[str] = None, - request_options: typing.Optional[RequestOptions] = None, + self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None ) -> typing.List[Webhook]: """ @@ -529,27 +444,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="GET", - params={ - "project": project, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Webhook], - parse_obj_as( - type_=typing.List[Webhook], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(project=project, request_options=request_options) + return _response.data async def create( self, @@ -634,38 +530,21 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/", - method="POST", - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = await self._raw_client.create( + url=url, + id=id, + organization=organization, + project=project, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - 
return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def info( self, @@ -706,21 +585,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/webhooks/info/", - method="GET", - params={ - "organization-only": organization_only, - }, - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.info(organization_only=organization_only, request_options=request_options) + return _response.data async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Webhook: """ @@ -761,24 +627,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Webhook, - parse_obj_as( - type_=Webhook, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -818,18 +668,8 @@ async def main() 
-> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id)}/", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -842,10 +682,7 @@ async def update( headers: typing.Optional[str] = None, is_active: typing.Optional[bool] = None, actions: typing.Optional[ - typing.Union[ - WebhooksUpdateRequestActionsItem, - typing.Sequence[WebhooksUpdateRequestActionsItem], - ] + typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] ] = None, id: typing.Optional[int] = OMIT, organization: typing.Optional[int] = OMIT, @@ -947,43 +784,25 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/webhooks/{jsonable_encoder(id_)}/", - method="PATCH", - params={ - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - }, - json={ - "id": id, - "organization": organization, - "project": project, - "url": url, - "send_payload": send_payload, - "send_for_all_actions": send_for_all_actions, - "headers": headers, - "is_active": is_active, - "actions": actions, - "created_at": created_at, - "updated_at": updated_at, - }, + _response = await self._raw_client.update( + id_, + url=url, + webhook_serializer_for_update_url=webhook_serializer_for_update_url, + send_payload=send_payload, + send_for_all_actions=send_for_all_actions, + headers=headers, + is_active=is_active, + actions=actions, + id=id, + 
organization=organization, + project=project, + webhook_serializer_for_update_send_payload=webhook_serializer_for_update_send_payload, + webhook_serializer_for_update_send_for_all_actions=webhook_serializer_for_update_send_for_all_actions, + webhook_serializer_for_update_headers=webhook_serializer_for_update_headers, + webhook_serializer_for_update_is_active=webhook_serializer_for_update_is_active, + webhook_serializer_for_update_actions=webhook_serializer_for_update_actions, + created_at=created_at, + updated_at=updated_at, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - WebhookSerializerForUpdate, - parse_obj_as( - type_=WebhookSerializerForUpdate, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/webhooks/raw_client.py b/src/label_studio_sdk/webhooks/raw_client.py new file mode 100644 index 000000000..0953cf927 --- /dev/null +++ b/src/label_studio_sdk/webhooks/raw_client.py @@ -0,0 +1,824 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.webhook import Webhook +from ..types.webhook_actions_item import WebhookActionsItem +from ..types.webhook_serializer_for_update import WebhookSerializerForUpdate +from ..types.webhook_serializer_for_update_actions_item import WebhookSerializerForUpdateActionsItem +from .types.webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawWebhooksClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[Webhook]]: + """ + + List all webhooks set up for your organization. + + Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. + + For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). + + Parameters + ---------- + project : typing.Optional[str] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[Webhook]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + url: str, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + send_payload: typing.Optional[bool] = OMIT, + send_for_all_actions: typing.Optional[bool] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + is_active: typing.Optional[bool] = OMIT, + actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Webhook]: + """ + + Create a webhook. + Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). + + If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). + + Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. 
If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. + + Parameters + ---------- + url : str + URL of webhook + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Sequence[WebhookActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Webhook] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + 
def info( + self, + *, + organization_only: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[None]: + """ + + Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + organization_only : typing.Optional[bool] + organization-only or not + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + "api/webhooks/info/", + method="GET", + params={ + "organization-only": organization_only, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Webhook]: + """ + + Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Webhook] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id_: int, + *, + url: str, + webhook_serializer_for_update_url: str, + send_payload: typing.Optional[bool] = None, + send_for_all_actions: typing.Optional[bool] = None, + headers: typing.Optional[str] = None, + is_active: typing.Optional[bool] = None, + actions: typing.Optional[ + typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] + ] = None, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_actions: typing.Optional[ + typing.Sequence[WebhookSerializerForUpdateActionsItem] + ] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[WebhookSerializerForUpdate]: + """ + + Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). 
+ + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id_ : int + A unique integer value identifying this webhook. + + url : str + URL of webhook + + webhook_serializer_for_update_url : str + URL of webhook + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[str] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + webhook_serializer_for_update_send_payload : typing.Optional[bool] + If value is False send only action + + webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + webhook_serializer_for_update_is_active : typing.Optional[bool] + If value is False the webhook is disabled + + webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[WebhookSerializerForUpdate] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id_)}/", + method="PATCH", + params={ + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + }, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawWebhooksClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, project: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Webhook]]: + """ + + List all webhooks set up for your organization. + + Webhooks in Label Studio let you set up integrations that subscribe to certain events that occur inside Label Studio. When an event is triggered, Label Studio sends an HTTP POST request to the configured webhook URL. 
+ + For more information, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks). + + Parameters + ---------- + project : typing.Optional[str] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[typing.List[Webhook]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="GET", + params={ + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Webhook], + parse_obj_as( + type_=typing.List[Webhook], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + url: str, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + send_payload: typing.Optional[bool] = OMIT, + send_for_all_actions: typing.Optional[bool] = OMIT, + headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + is_active: typing.Optional[bool] = OMIT, + actions: typing.Optional[typing.Sequence[WebhookActionsItem]] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Webhook]: + """ + + Create a webhook. + Label Studio provides several out-of-the box webhook events, which you can find listed here: [Available Label Studio webhooks](https://labelstud.io/guide/webhooks#Available-Label-Studio-webhooks). 
+ + If you want to create your own custom webhook, refer to [Create custom events for webhooks in Label Studio](https://labelstud.io/guide/webhook_create). + + Label Studio makes two main types of events available to integrate with webhooks: project-level task events and organization events. If you want to use organization-level webhook events, you will need to set `LABEL_STUDIO_ALLOW_ORGANIZATION_WEBHOOKS=true`. + + Parameters + ---------- + url : str + URL of webhook + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Sequence[WebhookActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Webhook] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/", + method="POST", + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def info( + self, + *, + organization_only: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[None]: + """ + + Get descriptions of all available webhook actions to set up webhooks. For more information, see the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + organization_only : typing.Optional[bool] + organization-only or not + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + "api/webhooks/info/", + method="GET", + params={ + "organization-only": organization_only, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Webhook]: + """ + + Get information about a specific webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Webhook] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Webhook, + parse_obj_as( + type_=Webhook, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). + + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id : int + A unique integer value identifying this webhook. + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id)}/", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id_: int, + *, + url: str, + webhook_serializer_for_update_url: str, + send_payload: typing.Optional[bool] = None, + send_for_all_actions: typing.Optional[bool] = None, + headers: typing.Optional[str] = None, + is_active: typing.Optional[bool] = None, + actions: typing.Optional[ + typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]] + ] = None, + id: typing.Optional[int] = OMIT, + organization: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + webhook_serializer_for_update_send_payload: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_send_for_all_actions: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_headers: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT, + webhook_serializer_for_update_is_active: typing.Optional[bool] = OMIT, + webhook_serializer_for_update_actions: typing.Optional[ + typing.Sequence[WebhookSerializerForUpdateActionsItem] + ] = OMIT, + created_at: typing.Optional[dt.datetime] = OMIT, + updated_at: typing.Optional[dt.datetime] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[WebhookSerializerForUpdate]: + """ + + Update a webhook. You will need to provide the webhook ID. You can get this from [List all webhooks](list). 
+ + For more information about webhooks, see [Set up webhooks in Label Studio](https://labelstud.io/guide/webhooks) and the [Webhook event reference](https://labelstud.io/guide/webhook_reference). + + Parameters + ---------- + id_ : int + A unique integer value identifying this webhook. + + url : str + URL of webhook + + webhook_serializer_for_update_url : str + URL of webhook + + send_payload : typing.Optional[bool] + If value is False send only action + + send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + headers : typing.Optional[str] + Key Value Json of headers + + is_active : typing.Optional[bool] + If value is False the webhook is disabled + + actions : typing.Optional[typing.Union[WebhooksUpdateRequestActionsItem, typing.Sequence[WebhooksUpdateRequestActionsItem]]] + + id : typing.Optional[int] + + organization : typing.Optional[int] + + project : typing.Optional[int] + + webhook_serializer_for_update_send_payload : typing.Optional[bool] + If value is False send only action + + webhook_serializer_for_update_send_for_all_actions : typing.Optional[bool] + If value is False - used only for actions from WebhookAction + + webhook_serializer_for_update_headers : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] + Key Value Json of headers + + webhook_serializer_for_update_is_active : typing.Optional[bool] + If value is False the webhook is disabled + + webhook_serializer_for_update_actions : typing.Optional[typing.Sequence[WebhookSerializerForUpdateActionsItem]] + + created_at : typing.Optional[dt.datetime] + Creation time + + updated_at : typing.Optional[dt.datetime] + Last update time + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[WebhookSerializerForUpdate] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/webhooks/{jsonable_encoder(id_)}/", + method="PATCH", + params={ + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + }, + json={ + "id": id, + "organization": organization, + "project": project, + "url": url, + "send_payload": send_payload, + "send_for_all_actions": send_for_all_actions, + "headers": headers, + "is_active": is_active, + "actions": actions, + "created_at": created_at, + "updated_at": updated_at, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + WebhookSerializerForUpdate, + parse_obj_as( + type_=WebhookSerializerForUpdate, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/webhooks/types/__init__.py b/src/label_studio_sdk/webhooks/types/__init__.py index 5c47f8599..2acf204b9 100644 --- a/src/label_studio_sdk/webhooks/types/__init__.py +++ b/src/label_studio_sdk/webhooks/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. 
+# isort: skip_file + from .webhooks_update_request_actions_item import WebhooksUpdateRequestActionsItem __all__ = ["WebhooksUpdateRequestActionsItem"] diff --git a/src/label_studio_sdk/workspaces/__init__.py b/src/label_studio_sdk/workspaces/__init__.py index ddc7fa13c..795aaf483 100644 --- a/src/label_studio_sdk/workspaces/__init__.py +++ b/src/label_studio_sdk/workspaces/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from . import members from .members import MembersCreateResponse, MembersListResponseItem diff --git a/src/label_studio_sdk/workspaces/client.py b/src/label_studio_sdk/workspaces/client.py index 413ac6b3a..2de101cc5 100644 --- a/src/label_studio_sdk/workspaces/client.py +++ b/src/label_studio_sdk/workspaces/client.py @@ -1,16 +1,12 @@ # This file was auto-generated by Fern from our API Definition. import typing -from ..core.client_wrapper import SyncClientWrapper -from .members.client import MembersClient + +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.request_options import RequestOptions from ..types.workspace import Workspace -from ..core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ..core.api_error import ApiError -from ..core.jsonable_encoder import jsonable_encoder -from ..core.client_wrapper import AsyncClientWrapper -from .members.client import AsyncMembersClient +from .members.client import AsyncMembersClient, MembersClient +from .raw_client import AsyncRawWorkspacesClient, RawWorkspacesClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
@@ -18,8 +14,19 @@ class WorkspacesClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - self.members = MembersClient(client_wrapper=self._client_wrapper) + self._raw_client = RawWorkspacesClient(client_wrapper=client_wrapper) + self.members = MembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawWorkspacesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawWorkspacesClient + """ + return self._raw_client def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ @@ -49,24 +56,8 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty ) client.workspaces.list() """ - _response = self._client_wrapper.httpx_client.request( - "api/workspaces", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Workspace], - parse_obj_as( - type_=typing.List[Workspace], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(request_options=request_options) + return _response.data def create( self, @@ -124,36 +115,16 @@ def create( ) client.workspaces.create() """ - _response = self._client_wrapper.httpx_client.request( - "api/workspaces", - method="POST", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.create( + title=title, + description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + 
is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> Workspace: """ @@ -184,24 +155,8 @@ def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = Non id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.get(id, request_options=request_options) + return _response.data def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -231,18 +186,8 @@ def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, 
request_options=request_options) + return _response.data def update( self, @@ -302,42 +247,34 @@ def update( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = self._raw_client.update( + id, + title=title, + description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data class AsyncWorkspacesClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - self.members = AsyncMembersClient(client_wrapper=self._client_wrapper) + self._raw_client = AsyncRawWorkspacesClient(client_wrapper=client_wrapper) + self.members = AsyncMembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawWorkspacesClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawWorkspacesClient + """ + return self._raw_client async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[Workspace]: """ @@ -375,24 +312,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/workspaces", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[Workspace], - parse_obj_as( - type_=typing.List[Workspace], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(request_options=request_options) + return _response.data async def create( self, @@ -458,36 +379,16 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - "api/workspaces", - method="POST", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.create( + title=title, + description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data async def get(self, id: int, *, request_options: 
typing.Optional[RequestOptions] = None) -> Workspace: """ @@ -526,24 +427,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.get(id, request_options=request_options) + return _response.data async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: """ @@ -581,18 +466,8 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="DELETE", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, request_options=request_options) + return _response.data async def update( self, @@ -660,33 +535,14 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}", - method="PATCH", - json={ - "title": title, - "description": description, - "is_public": is_public, - "is_personal": is_personal, - "color": color, - "is_archived": is_archived, - }, - headers={ - "content-type": "application/json", - }, + _response = await self._raw_client.update( + id, + title=title, + 
description=description, + is_public=is_public, + is_personal=is_personal, + color=color, + is_archived=is_archived, request_options=request_options, - omit=OMIT, ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - Workspace, - parse_obj_as( - type_=Workspace, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + return _response.data diff --git a/src/label_studio_sdk/workspaces/members/__init__.py b/src/label_studio_sdk/workspaces/members/__init__.py index 2e3a8f37d..a527383e7 100644 --- a/src/label_studio_sdk/workspaces/members/__init__.py +++ b/src/label_studio_sdk/workspaces/members/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .types import MembersCreateResponse, MembersListResponseItem __all__ = ["MembersCreateResponse", "MembersListResponseItem"] diff --git a/src/label_studio_sdk/workspaces/members/client.py b/src/label_studio_sdk/workspaces/members/client.py index 8335b4483..21e304759 100644 --- a/src/label_studio_sdk/workspaces/members/client.py +++ b/src/label_studio_sdk/workspaces/members/client.py @@ -1,15 +1,12 @@ # This file was auto-generated by Fern from our API Definition. 
import typing -from ...core.client_wrapper import SyncClientWrapper + +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.request_options import RequestOptions -from .types.members_list_response_item import MembersListResponseItem -from ...core.jsonable_encoder import jsonable_encoder -from ...core.pydantic_utilities import parse_obj_as -from json.decoder import JSONDecodeError -from ...core.api_error import ApiError +from .raw_client import AsyncRawMembersClient, RawMembersClient from .types.members_create_response import MembersCreateResponse -from ...core.client_wrapper import AsyncClientWrapper +from .types.members_list_response_item import MembersListResponseItem # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) @@ -17,7 +14,18 @@ class MembersClient: def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = RawMembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> RawMembersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + RawMembersClient + """ + return self._raw_client def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -50,31 +58,11 @@ def list( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MembersListResponseItem], - parse_obj_as( - type_=typing.List[MembersListResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.list(id, request_options=request_options) + return _response.data def create( - self, - id: int, - *, - user: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> MembersCreateResponse: """ @@ -107,38 +95,11 @@ def create( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="POST", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MembersCreateResponse, - parse_obj_as( - type_=MembersCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.create(id, user=user, request_options=request_options) + return _response.data def 
delete( - self, - id: int, - *, - user: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> None: """ @@ -170,30 +131,24 @@ def delete( id=1, ) """ - _response = self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="DELETE", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = self._raw_client.delete(id, user=user, request_options=request_options) + return _response.data class AsyncMembersClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper + self._raw_client = AsyncRawMembersClient(client_wrapper=client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawMembersClient: + """ + Retrieves a raw implementation of this client that returns raw responses. 
+ + Returns + ------- + AsyncRawMembersClient + """ + return self._raw_client async def list( self, id: int, *, request_options: typing.Optional[RequestOptions] = None @@ -234,31 +189,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="GET", - request_options=request_options, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - typing.List[MembersListResponseItem], - parse_obj_as( - type_=typing.List[MembersListResponseItem], # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.list(id, request_options=request_options) + return _response.data async def create( - self, - id: int, - *, - user: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> MembersCreateResponse: """ @@ -299,38 +234,11 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="POST", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return typing.cast( - MembersCreateResponse, - parse_obj_as( - type_=MembersCreateResponse, # type: ignore - object_=_response.json(), - ), - ) - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await 
self._raw_client.create(id, user=user, request_options=request_options) + return _response.data async def delete( - self, - id: int, - *, - user: typing.Optional[int] = OMIT, - request_options: typing.Optional[RequestOptions] = None, + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None ) -> None: """ @@ -370,22 +278,5 @@ async def main() -> None: asyncio.run(main()) """ - _response = await self._client_wrapper.httpx_client.request( - f"api/workspaces/{jsonable_encoder(id)}/memberships", - method="DELETE", - json={ - "user": user, - }, - headers={ - "content-type": "application/json", - }, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) + _response = await self._raw_client.delete(id, user=user, request_options=request_options) + return _response.data diff --git a/src/label_studio_sdk/workspaces/members/raw_client.py b/src/label_studio_sdk/workspaces/members/raw_client.py new file mode 100644 index 000000000..0bb64c622 --- /dev/null +++ b/src/label_studio_sdk/workspaces/members/raw_client.py @@ -0,0 +1,290 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing +from json.decoder import JSONDecodeError + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.http_response import AsyncHttpResponse, HttpResponse +from ...core.jsonable_encoder import jsonable_encoder +from ...core.pydantic_utilities import parse_obj_as +from ...core.request_options import RequestOptions +from .types.members_create_response import MembersCreateResponse +from .types.members_list_response_item import MembersListResponseItem + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawMembersClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[typing.List[MembersListResponseItem]]: + """ + + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[typing.List[MembersListResponseItem]] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[MembersCreateResponse]: + """ + + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[MembersCreateResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[None]: + """ + + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawMembersClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[MembersListResponseItem]]: + """ + + List all workspace memberships for a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[MembersListResponseItem]] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[MembersListResponseItem], + parse_obj_as( + type_=typing.List[MembersListResponseItem], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[MembersCreateResponse]: + """ + + Create a new workspace membership. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[MembersCreateResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="POST", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + MembersCreateResponse, + parse_obj_as( + type_=MembersCreateResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, user: typing.Optional[int] = OMIT, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific workspace membership. You will need to provide the workspace ID and the user ID. You can find this using [List workspace memberships](list). + + Parameters + ---------- + id : int + Workspace ID + + user : typing.Optional[int] + User ID of the workspace member + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}/memberships", + method="DELETE", + json={ + "user": user, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/src/label_studio_sdk/workspaces/members/types/__init__.py b/src/label_studio_sdk/workspaces/members/types/__init__.py index b6f51dbc8..b1aa23227 100644 --- a/src/label_studio_sdk/workspaces/members/types/__init__.py +++ b/src/label_studio_sdk/workspaces/members/types/__init__.py @@ -1,5 +1,7 @@ # This file was auto-generated by Fern from our API Definition. +# isort: skip_file + from .members_create_response import MembersCreateResponse from .members_list_response_item import MembersListResponseItem diff --git a/src/label_studio_sdk/workspaces/members/types/members_create_response.py b/src/label_studio_sdk/workspaces/members/types/members_create_response.py index 13ed6d9ba..38c11f695 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_create_response.py +++ b/src/label_studio_sdk/workspaces/members/types/members_create_response.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. 
-from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class MembersCreateResponse(UniversalBaseModel): diff --git a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py index d436b704e..0428f3e9a 100644 --- a/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py +++ b/src/label_studio_sdk/workspaces/members/types/members_list_response_item.py @@ -1,9 +1,9 @@ # This file was auto-generated by Fern from our API Definition. -from ....core.pydantic_utilities import UniversalBaseModel import typing + import pydantic -from ....core.pydantic_utilities import IS_PYDANTIC_V2 +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel class MembersListResponseItem(UniversalBaseModel): diff --git a/src/label_studio_sdk/workspaces/raw_client.py b/src/label_studio_sdk/workspaces/raw_client.py new file mode 100644 index 000000000..e3dfdb351 --- /dev/null +++ b/src/label_studio_sdk/workspaces/raw_client.py @@ -0,0 +1,561 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from json.decoder import JSONDecodeError + +from ..core.api_error import ApiError +from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..core.request_options import RequestOptions +from ..types.workspace import Workspace + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class RawWorkspacesClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[typing.List[Workspace]]: + """ + + List all workspaces for your organization. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[typing.List[Workspace]] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Workspace]: + """ + + Create a new workspace. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. 
This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Workspace] + + """ + _response = self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[Workspace]: + """ + + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). 
+ + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[Workspace] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]: + """ + + Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[None] + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return HttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> HttpResponse[Workspace]: + """ + + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + HttpResponse[Workspace] + + """ + _response = self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawWorkspacesClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[typing.List[Workspace]]: + """ + + List all workspaces for your organization. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[typing.List[Workspace]] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + typing.List[Workspace], + parse_obj_as( + type_=typing.List[Workspace], # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def create( + self, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Workspace]: + """ + + Create a new workspace. + + Workspaces in Label Studio let you organize your projects and users into separate spaces. This is useful for managing different teams, departments, or projects within your organization. + + For more information, see [Workspaces in Label Studio](https://docs.humansignal.com/guide/workspaces). + + Parameters + ---------- + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Workspace] + + """ + _response = await self._client_wrapper.httpx_client.request( + "api/workspaces", + method="POST", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[Workspace]: + """ + + Get information about a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Workspace] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def delete( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[None]: + """ + + Delete a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[None] + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return AsyncHttpResponse(response=_response, data=None) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + async def update( + self, + id: int, + *, + title: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + is_public: typing.Optional[bool] = OMIT, + is_personal: typing.Optional[bool] = OMIT, + color: typing.Optional[str] = OMIT, + is_archived: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AsyncHttpResponse[Workspace]: + """ + + Update a specific workspace. You will need to provide the workspace ID. You can find this using [List workspaces](list). + + Parameters + ---------- + id : int + Workspace ID + + title : typing.Optional[str] + Workspace title + + description : typing.Optional[str] + Workspace description + + is_public : typing.Optional[bool] + Is workspace public + + is_personal : typing.Optional[bool] + Is workspace personal + + color : typing.Optional[str] + Workspace color in HEX format + + is_archived : typing.Optional[bool] + Is workspace archived + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AsyncHttpResponse[Workspace] + + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/workspaces/{jsonable_encoder(id)}", + method="PATCH", + json={ + "title": title, + "description": description, + "is_public": is_public, + "is_personal": is_personal, + "color": color, + "is_archived": is_archived, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + Workspace, + parse_obj_as( + type_=Workspace, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index dedf0c76b..000000000 --- a/tests/conftest.py +++ /dev/null @@ -1,22 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -import os -import pytest -from label_studio_sdk import AsyncLabelStudio - - -@pytest.fixture -def client() -> LabelStudio: - return LabelStudio( - api_key=os.getenv("ENV_API_KEY", "api_key"), - base_url=os.getenv("TESTS_BASE_URL", "base_url"), - ) - - -@pytest.fixture -def async_client() -> AsyncLabelStudio: - return AsyncLabelStudio( - api_key=os.getenv("ENV_API_KEY", "api_key"), - base_url=os.getenv("TESTS_BASE_URL", "base_url"), - ) diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py index 73f811f5e..ab04ce639 100644 --- a/tests/custom/test_client.py +++ b/tests/custom/test_client.py @@ -4,4 +4,4 @@ # Get started with writing tests with pytest at https://docs.pytest.org @pytest.mark.skip(reason="Unimplemented") def test_client() -> None: - assert True == True + assert True diff --git a/tests/export_storage/__init__.py b/tests/export_storage/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/export_storage/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/export_storage/test_azure.py b/tests/export_storage/test_azure.py deleted file mode 100644 index 0b645b804..000000000 --- a/tests/export_storage/test_azure.py +++ /dev/null @@ -1,251 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - }, - ) - response = client.export_storage.azure.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", 
- } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.export_storage.azure.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - 
"created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.azure.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.export_storage.azure.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - 
"last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.azure.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.azure.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_gcs.py b/tests/export_storage/test_gcs.py deleted file mode 100644 index 7c6f7a22b..000000000 --- a/tests/export_storage/test_gcs.py +++ /dev/null @@ -1,251 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - }, - ) - response = client.export_storage.gcs.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - 
"google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.export_storage.gcs.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", 
- "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.gcs.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.export_storage.gcs.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "bucket": "bucket", - 
"prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.gcs.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.gcs.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_local.py b/tests/export_storage/test_local.py deleted file mode 100644 index f39bae76b..000000000 --- a/tests/export_storage/test_local.py +++ /dev/null @@ -1,225 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - }, - ) - response = client.export_storage.local.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.export_storage.local.create() - 
validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "project": "integer", - } - response = client.export_storage.local.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to 
avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.local.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.export_storage.local.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - 
"project": "integer", - } - response = client.export_storage.local.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.local.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_redis.py b/tests/export_storage/test_redis.py deleted file mode 100644 index 32e207ab9..000000000 --- a/tests/export_storage/test_redis.py +++ /dev/null @@ -1,261 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "db": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "db": "integer", - "project": "integer", - } - }, - ) - response = client.export_storage.redis.list() - 
validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "db": 1, - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "db": "integer", - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.export_storage.redis.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - 
"can_delete_objects": True, - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "db": "integer", - "project": "integer", - } - response = client.export_storage.redis.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "db": 1, - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "db": "integer", - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.export_storage.redis.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.update(id=1) - 
validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "db": "integer", - "project": "integer", - } - response = client.export_storage.redis.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.redis.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_s3.py b/tests/export_storage/test_s3.py deleted file mode 100644 index ee8909b6d..000000000 --- a/tests/export_storage/test_s3.py +++ /dev/null @@ -1,291 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - }, - ) - response = client.export_storage.s3.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, 
async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.export_storage.s3.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": 
"regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "can_delete_objects": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": 
"aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "can_delete_objects": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.export_storage.s3.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "can_delete_objects": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "can_delete_objects": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - 
"use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_s3s.py b/tests/export_storage/test_s3s.py deleted file mode 100644 index f9efabd90..000000000 --- a/tests/export_storage/test_s3s.py +++ /dev/null @@ -1,175 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - }, - ) - response = client.export_storage.s3s.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: 
typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3s.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3s.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - 
client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "external_id": "external_id", - "role_arn": "role_arn", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "external_id": None, - "role_arn": None, - "region_name": None, - "s3_endpoint": None, - "project": "integer", - } - response = client.export_storage.s3s.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.s3s.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.export_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.export_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) diff --git a/tests/import_storage/__init__.py b/tests/import_storage/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/import_storage/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- diff --git a/tests/import_storage/test_azure.py b/tests/import_storage/test_azure.py deleted file mode 100644 index da52ad2c2..000000000 --- a/tests/import_storage/test_azure.py +++ /dev/null @@ -1,269 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - }, - ) - response = client.import_storage.azure.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.import_storage.azure.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.azure.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - 
} - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.azure.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.azure.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - "description": "description", - "project": 1, - "container": "container", - "prefix": "prefix", - "account_name": "account_name", - "account_key": "account_key", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "container": None, - "prefix": None, - "account_name": None, - "account_key": None, - } - response = client.import_storage.azure.update(id=1) - validate_response(response, expected_response, 
expected_types) - - async_response = await async_client.import_storage.azure.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "container": "container", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "account_name": "account_name", - "account_key": "account_key", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "container": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "account_name": None, - "account_key": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.azure.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.azure.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_gcs.py b/tests/import_storage/test_gcs.py deleted file mode 100644 index e44d38d91..000000000 --- a/tests/import_storage/test_gcs.py +++ /dev/null @@ -1,269 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - }, - ) - response = client.import_storage.gcs.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - 
"description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.import_storage.gcs.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.gcs.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": 
None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.gcs.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.gcs.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "google_application_credentials": None, - "google_project_id": None, - } - response = client.import_storage.gcs.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = 
await async_client.import_storage.gcs.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "google_application_credentials": "google_application_credentials", - "google_project_id": "google_project_id", - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "presign_ttl": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "google_application_credentials": None, - "google_project_id": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "presign_ttl": "integer", - "project": "integer", - } - response = client.import_storage.gcs.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.gcs.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_local.py b/tests/import_storage/test_local.py deleted file mode 100644 index 75f0bff65..000000000 --- a/tests/import_storage/test_local.py +++ /dev/null @@ -1,219 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "project": "integer", - } - }, - ) - response = client.import_storage.local.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.local.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.import_storage.local.create() - validate_response(response, expected_response, expected_types) - - 
async_response = await async_client.import_storage.local.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.local.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "project": "integer", - } - response = client.import_storage.local.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.local.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.local.delete(id=1) 
# type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.local.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "project": "integer", - "path": None, - "regex_filter": None, - "use_blob_urls": None, - } - response = client.import_storage.local.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.local.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "project": "integer", - } - response = client.import_storage.local.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await 
async_client.import_storage.local.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_redis.py b/tests/import_storage/test_redis.py deleted file mode 100644 index 01cd0696e..000000000 --- a/tests/import_storage/test_redis.py +++ /dev/null @@ -1,255 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "db": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "db": "integer", - "project": "integer", - } - }, - ) - response = client.import_storage.redis.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.list() - validate_response(async_response, expected_response, expected_types) - - -async def 
test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.import_storage.redis.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.redis.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": 
None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "db": "integer", - "project": "integer", - } - response = client.import_storage.redis.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.redis.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "title": "title", - "description": "description", - "project": 1, - "path": "path", - "host": "host", - "port": "port", - "password": "password", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "title": None, - "description": None, - "project": "integer", - "path": None, - "host": None, - "port": None, - "password": None, - } - response = client.import_storage.redis.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "path": "path", - "host": 
"host", - "port": "port", - "password": "password", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "db": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "path": None, - "host": None, - "port": None, - "password": None, - "regex_filter": None, - "use_blob_urls": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "db": "integer", - "project": "integer", - } - response = client.import_storage.redis.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.redis.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_s3.py b/tests/import_storage/test_s3.py deleted file mode 100644 index 3b706703d..000000000 --- a/tests/import_storage/test_s3.py +++ /dev/null @@ -1,319 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - }, - ) - response = client.import_storage.s3.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.list() - validate_response(async_response, 
expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "recursive_scan": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "recursive_scan": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.import_storage.s3.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": 
"last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: 
AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "regex_filter": "regex_filter", - "use_blob_urls": True, - "presign": True, - "presign_ttl": 1, - "recursive_scan": True, - "title": "title", - "description": "description", - "project": 1, - "bucket": "bucket", - "prefix": "prefix", - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - "region_name": "region_name", - "s3_endpoint": "s3_endpoint", - } - expected_types: typing.Any = { - "regex_filter": None, - "use_blob_urls": None, - "presign": None, - "presign_ttl": "integer", - "recursive_scan": None, - "title": None, - "description": None, - "project": "integer", - "bucket": None, - "prefix": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - } - response = client.import_storage.s3.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "type": "type", - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "aws_access_key_id": "aws_access_key_id", - "aws_secret_access_key": "aws_secret_access_key", - "aws_session_token": "aws_session_token", - "aws_sse_kms_key_id": "aws_sse_kms_key_id", - 
"region_name": "region_name", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "type": None, - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "aws_access_key_id": None, - "aws_secret_access_key": None, - "aws_session_token": None, - "aws_sse_kms_key_id": None, - "region_name": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_s3s.py b/tests/import_storage/test_s3s.py deleted file mode 100644 index 96ba46e7a..000000000 --- a/tests/import_storage/test_s3s.py +++ /dev/null @@ -1,329 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - "role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - }, - ) - response = client.import_storage.s3s.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": 
"2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - "role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": 
"external_id", - "role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3s.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - 
"role_arn": "role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.import_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.import_storage.s3s.validate() # type: ignore[func-returns-value] - is None - ) - - -async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "synchronizable": True, - "presign": True, - "last_sync": "2024-01-15T09:30:00Z", - "last_sync_count": 1, - "last_sync_job": "last_sync_job", - "status": "initialized", - "traceback": "traceback", - "meta": {"key": "value"}, - "title": "title", - "description": "description", - "created_at": "2024-01-15T09:30:00Z", - "bucket": "bucket", - "prefix": "prefix", - "regex_filter": "regex_filter", - "use_blob_urls": True, - "region_name": "region_name", - "external_id": "external_id", - "role_arn": 
"role_arn", - "s3_endpoint": "s3_endpoint", - "presign_ttl": 1, - "recursive_scan": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "synchronizable": None, - "presign": None, - "last_sync": "datetime", - "last_sync_count": "integer", - "last_sync_job": None, - "status": None, - "traceback": None, - "meta": ("dict", {0: (None, None)}), - "title": None, - "description": None, - "created_at": "datetime", - "bucket": None, - "prefix": None, - "regex_filter": None, - "use_blob_urls": None, - "region_name": None, - "external_id": None, - "role_arn": None, - "s3_endpoint": None, - "presign_ttl": "integer", - "recursive_scan": None, - "project": "integer", - } - response = client.import_storage.s3s.sync(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.s3s.sync(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/projects/__init__.py b/tests/projects/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/projects/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/projects/test_exports.py b/tests/projects/test_exports.py deleted file mode 100644 index 42b059812..000000000 --- a/tests/projects/test_exports.py +++ /dev/null @@ -1,265 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_formats(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "name": "JSON", - "title": "title", - "description": "description", - "link": "link", - "tags": ["tags"], - "disabled": True, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "name": None, - "title": None, - "description": None, - "link": None, - "tags": ("list", {0: None}), - "disabled": None, - } - }, - ) - response = client.projects.exports.list_formats(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.list_formats(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "title": "title", - "id": 1, - "created_by": { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "email": "email", - "avatar": "avatar", - }, - "created_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "status": "created", - "md5": "md5", - "counters": {"key": "value"}, - "converted_formats": [{"export_type": "export_type"}], - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "title": None, - "id": "integer", - "created_by": { - "id": "integer", - "first_name": None, - "last_name": None, - "email": None, - "avatar": None, - }, - "created_at": "datetime", - "finished_at": "datetime", - "status": None, - "md5": None, - "counters": ("dict", {0: (None, None)}), - "converted_formats": ("list", {0: {"export_type": None}}), - } - }, - ) - response = client.projects.exports.list(project_id=1) - validate_response(response, expected_response, expected_types) - - async_response = 
await async_client.projects.exports.list(project_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "id": 1, - "created_by": { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "email": "email", - "avatar": "avatar", - }, - "created_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "status": "created", - "md5": "md5", - "counters": {"key": "value"}, - "converted_formats": [ - { - "id": 1, - "status": "created", - "export_type": "export_type", - "traceback": "traceback", - } - ], - "task_filter_options": { - "view": 1, - "skipped": "skipped", - "finished": "finished", - "annotated": "annotated", - "only_with_annotations": True, - }, - "annotation_filter_options": { - "usual": True, - "ground_truth": True, - "skipped": True, - }, - "serialization_options": { - "drafts": {"only_id": True}, - "predictions": {"only_id": True}, - "include_annotation_history": True, - "annotations__completed_by": {"only_id": True}, - "interpolate_key_frames": True, - }, - } - expected_types: typing.Any = { - "title": None, - "id": "integer", - "created_by": { - "id": "integer", - "first_name": None, - "last_name": None, - "email": None, - "avatar": None, - }, - "created_at": "datetime", - "finished_at": "datetime", - "status": None, - "md5": None, - "counters": ("dict", {0: (None, None)}), - "converted_formats": ( - "list", - { - 0: { - "id": "integer", - "status": None, - "export_type": None, - "traceback": None, - } - }, - ), - "task_filter_options": { - "view": "integer", - "skipped": None, - "finished": None, - "annotated": None, - "only_with_annotations": None, - }, - "annotation_filter_options": { - "usual": None, - "ground_truth": None, - "skipped": None, - }, - "serialization_options": { - "drafts": {"only_id": None}, - "predictions": {"only_id": None}, - 
"include_annotation_history": None, - "annotations__completed_by": {"only_id": None}, - "interpolate_key_frames": None, - }, - } - response = client.projects.exports.create(project_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.create(project_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "id": 1, - "created_by": { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "email": "email", - "avatar": "avatar", - }, - "created_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "status": "created", - "md5": "md5", - "counters": {"key": "value"}, - "converted_formats": [ - { - "id": 1, - "status": "created", - "export_type": "export_type", - "traceback": "traceback", - } - ], - } - expected_types: typing.Any = { - "title": None, - "id": "integer", - "created_by": { - "id": "integer", - "first_name": None, - "last_name": None, - "email": None, - "avatar": None, - }, - "created_at": "datetime", - "finished_at": "datetime", - "status": None, - "md5": None, - "counters": ("dict", {0: (None, None)}), - "converted_formats": ( - "list", - { - 0: { - "id": "integer", - "status": None, - "export_type": None, - "traceback": None, - } - }, - ), - } - response = client.projects.exports.get(project_id=1, export_pk="export_pk") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.get(project_id=1, export_pk="export_pk") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.projects.exports.delete(project_id=1, 
export_pk="export_pk") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.projects.exports.delete(project_id=1, export_pk="export_pk") # type: ignore[func-returns-value] - is None - ) - - -async def test_convert(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"export_type": "JSON", "converted_format": 1} - expected_types: typing.Any = {"export_type": None, "converted_format": "integer"} - response = client.projects.exports.convert(project_id=1, export_pk="export_pk") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.exports.convert(project_id=1, export_pk="export_pk") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/projects/test_pauses.py b/tests/projects/test_pauses.py deleted file mode 100644 index 5bf4ad0c3..000000000 --- a/tests/projects/test_pauses.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - }, - ) - response = client.projects.pauses.list(project_pk=1, user_pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.pauses.list(project_pk=1, user_pk=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - response = client.projects.pauses.create(project_pk=1, user_pk=1, reason="reason") - validate_response(response, expected_response, expected_types) - - 
async_response = await async_client.projects.pauses.create(project_pk=1, user_pk=1, reason="reason") - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - response = client.projects.pauses.get(project_pk=1, user_pk=1, id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.pauses.get(project_pk=1, user_pk=1, id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.projects.pauses.delete(project_pk=1, user_pk=1, id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.projects.pauses.delete(project_pk=1, user_pk=1, id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "project": 1, - "user": 1, - "paused_by": 1, - "reason": "reason", - "verbose_reason": "verbose_reason", - "deleted_by": 1, - "deleted_at": "2024-01-15T09:30:00Z", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - 
"id": "integer", - "project": "integer", - "user": "integer", - "paused_by": "integer", - "reason": None, - "verbose_reason": None, - "deleted_by": "integer", - "deleted_at": "datetime", - "created_at": "datetime", - "updated_at": "datetime", - } - response = client.projects.pauses.update(project_pk=1, user_pk=1, id=1, reason="reason") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.pauses.update(project_pk=1, user_pk=1, id=1, reason="reason") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/__init__.py b/tests/prompts/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/prompts/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/prompts/test_indicators.py b/tests/prompts/test_indicators.py deleted file mode 100644 index 8167c72e1..000000000 --- a/tests/prompts/test_indicators.py +++ /dev/null @@ -1,47 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "key": "key", - "title": "title", - "main_kpi": "main_kpi", - "secondary_kpi": "secondary_kpi", - "additional_kpis": [{}], - "extra_kpis": [{}], - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "key": None, - "title": None, - "main_kpi": None, - "secondary_kpi": None, - "additional_kpis": ("list", {0: {}}), - "extra_kpis": ("list", {0: {}}), - } - }, - ) - response = client.prompts.indicators.list(pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.indicators.list(pk=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"title": "title", "values": {"key": "value"}} - expected_types: typing.Any = {"title": None, "values": ("dict", {0: (None, None)})} - response = client.prompts.indicators.get(indicator_key="indicator_key", pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.indicators.get(indicator_key="indicator_key", pk=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/test_runs.py b/tests/prompts/test_runs.py deleted file mode 100644 index a3fe40ce5..000000000 --- a/tests/prompts/test_runs.py +++ /dev/null @@ -1,74 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "organization": 1, - "project": 1, - "model_version": 1, - "created_by": 1, - "project_subset": "All", - "status": "Pending", - "job_id": "job_id", - "created_at": "2024-01-15T09:30:00Z", - "triggered_at": "2024-01-15T09:30:00Z", - "predictions_updated_at": "2024-01-15T09:30:00Z", - "completed_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "organization": "integer", - "project": "integer", - "model_version": "integer", - "created_by": "integer", - "project_subset": None, - "status": None, - "job_id": None, - "created_at": "datetime", - "triggered_at": "datetime", - "predictions_updated_at": "datetime", - "completed_at": "datetime", - } - response = client.prompts.runs.list(id=1, version_id=1, project=1, project_subset="All") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.runs.list(id=1, version_id=1, project=1, project_subset="All") - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "organization": 1, - "project": 1, - "model_version": 1, - "created_by": 1, - "project_subset": "All", - "status": "Pending", - "job_id": "job_id", - "created_at": "2024-01-15T09:30:00Z", - "triggered_at": "2024-01-15T09:30:00Z", - "predictions_updated_at": "2024-01-15T09:30:00Z", - "completed_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "organization": "integer", - "project": "integer", - "model_version": "integer", - "created_by": "integer", - "project_subset": None, - "status": None, - "job_id": None, - "created_at": "datetime", - "triggered_at": "datetime", - 
"predictions_updated_at": "datetime", - "completed_at": "datetime", - } - response = client.prompts.runs.create(id=1, version_id=1, project=1, project_subset="All") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.runs.create(id=1, version_id=1, project=1, project_subset="All") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/prompts/test_versions.py b/tests/prompts/test_versions.py deleted file mode 100644 index 584a55e84..000000000 --- a/tests/prompts/test_versions.py +++ /dev/null @@ -1,280 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - }, - ) - response = client.prompts.versions.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": 
"title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - expected_types: typing.Any = { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - response = client.prompts.versions.create(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.create(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - expected_types: typing.Any = { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - response = client.prompts.versions.get(id=1, version_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.get(id=1, version_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to 
return a value - assert ( - client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.prompts.versions.delete(id=1, version_id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - } - expected_types: typing.Any = { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - } - response = client.prompts.versions.update(id=1, version_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.update(id=1, version_id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_cost_estimate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "prompt_cost_usd": "prompt_cost_usd", - "completion_cost_usd": "completion_cost_usd", - "total_cost_usd": "total_cost_usd", - "is_error": True, - "error_type": "error_type", - "error_message": "error_message", - } - expected_types: typing.Any = { - "prompt_cost_usd": None, - "completion_cost_usd": None, - "total_cost_usd": None, - "is_error": None, - "error_type": None, - "error_message": None, - } - response = client.prompts.versions.cost_estimate(prompt_id=1, version_id=1, project_id=1, project_subset=1) - validate_response(response, expected_response, expected_types) - - async_response = await 
async_client.prompts.versions.cost_estimate( - prompt_id=1, version_id=1, project_id=1, project_subset=1 - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get_refined_prompt(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "reasoning": "reasoning", - "prompt": "prompt", - "refinement_job_id": "refinement_job_id", - "refinement_status": "Pending", - "total_cost": "total_cost", - "previous_version": { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - }, - } - expected_types: typing.Any = { - "title": None, - "reasoning": None, - "prompt": None, - "refinement_job_id": None, - "refinement_status": None, - "total_cost": None, - "previous_version": { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - }, - } - response = client.prompts.versions.get_refined_prompt( - prompt_id=1, version_id=1, refinement_job_id="refinement_job_id" - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.get_refined_prompt( - prompt_id=1, version_id=1, refinement_job_id="refinement_job_id" - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_refine_prompt(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "reasoning": "reasoning", - "prompt": "prompt", - "refinement_job_id": "refinement_job_id", - "refinement_status": "Pending", - "total_cost": 
"total_cost", - "previous_version": { - "title": "title", - "parent_model": 1, - "model_provider_connection": 1, - "prompt": "prompt", - "provider": "OpenAI", - "provider_model_id": "provider_model_id", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - }, - } - expected_types: typing.Any = { - "title": None, - "reasoning": None, - "prompt": None, - "refinement_job_id": None, - "refinement_status": None, - "total_cost": None, - "previous_version": { - "title": None, - "parent_model": "integer", - "model_provider_connection": "integer", - "prompt": None, - "provider": None, - "provider_model_id": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - }, - } - response = client.prompts.versions.refine_prompt(prompt_id=1, version_id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.versions.refine_prompt(prompt_id=1, version_id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_actions.py b/tests/test_actions.py deleted file mode 100644 index e34997d2b..000000000 --- a/tests/test_actions.py +++ /dev/null @@ -1,65 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -from label_studio_sdk.actions import ActionsCreateRequestFilters -from label_studio_sdk.actions import ActionsCreateRequestFiltersItemsItem -from label_studio_sdk.actions import ActionsCreateRequestSelectedItemsExcluded - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.actions.list() # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.actions.list() # type: ignore[func-returns-value] - is None - ) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.actions.create( - id="retrieve_tasks_predictions", - project=1, - filters=ActionsCreateRequestFilters( - conjunction="or", - items=[ - ActionsCreateRequestFiltersItemsItem( - filter="filter:tasks:id", - operator="greater", - type="Number", - value=123, - ) - ], - ), - selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), - ordering=["tasks:total_annotations"], - ) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.actions.create( - id="retrieve_tasks_predictions", - project=1, - filters=ActionsCreateRequestFilters( - conjunction="or", - items=[ - ActionsCreateRequestFiltersItemsItem( - filter="filter:tasks:id", - operator="greater", - type="Number", - value=123, - ) - ], - ), - selected_items=ActionsCreateRequestSelectedItemsExcluded(all_=True, excluded=[124, 125, 126]), - ordering=["tasks:total_annotations"], - ) # type: ignore[func-returns-value] - is None - ) diff --git a/tests/test_annotations.py b/tests/test_annotations.py deleted file mode 100644 index 43493c8a3..000000000 --- a/tests/test_annotations.py +++ /dev/null @@ 
-1,467 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": "prediction", - "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - response = client.annotations.get(id=1) - validate_response(response, expected_response, 
expected_types) - - async_response = await async_client.annotations.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.annotations.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.annotations.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": "prediction", - "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - 
"import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - response = client.annotations.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": 
"prediction", - "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - }, - ) - response = client.annotations.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": 1, - "unique_id": "unique_id", - "was_cancelled": False, - "ground_truth": False, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 10, - "import_id": 1, - "last_action": "prediction", 
- "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "created_username": None, - "created_ago": None, - "completed_by": "integer", - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - response = client.annotations.create( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.create( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - was_cancelled=False, - ground_truth=True, - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_create_bulk(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"id": 1}] - expected_types: 
typing.Tuple[typing.Any, typing.Any] = ( - "list", - {0: {"id": "integer"}}, - ) - response = client.annotations.create_bulk() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.annotations.create_bulk() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_comments.py b/tests/test_comments.py deleted file mode 100644 index 509abf4ee..000000000 --- a/tests/test_comments.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": "2024-01-15T09:30:00Z", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - }, - ) - response = client.comments.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": 
"2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - response = client.comments.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - response = client.comments.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.comments.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.comments.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "text": "text", - "project": 1, - "task": 1, - "annotation": 1, 
- "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_resolved": True, - "resolved_at": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "text": None, - "project": "integer", - "task": "integer", - "annotation": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_resolved": None, - "resolved_at": "datetime", - } - response = client.comments.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.comments.update(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_export_storage.py b/tests/test_export_storage.py deleted file mode 100644 index b15334bac..000000000 --- a/tests/test_export_storage.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"name": "name", "title": "title"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - {0: {"name": None, "title": None}}, - ) - response = client.export_storage.list_types() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.export_storage.list_types() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_files.py b/tests/test_files.py deleted file mode 100644 index 7bf2e7fde..000000000 --- a/tests/test_files.py +++ /dev/null @@ -1,78 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"id": 1, "file": "file"} - expected_types: typing.Any = {"id": "integer", "file": None} - response = client.files.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.files.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.files.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.files.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"id": 1, "file": "file"} - expected_types: typing.Any = {"id": "integer", "file": None} - response = client.files.update(id_=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.files.update(id_=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"id": 1, "file": "file"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - {0: {"id": "integer", "file": None}}, - ) - response = client.files.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.files.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete_many(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # 
Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.files.delete_many(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.files.delete_many(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_download(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.files.download(filename="filename") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.files.download(filename="filename") # type: ignore[func-returns-value] - is None - ) diff --git a/tests/test_import_storage.py b/tests/test_import_storage.py deleted file mode 100644 index 1a4f25768..000000000 --- a/tests/test_import_storage.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_types(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"name": "name", "title": "title"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - {0: {"name": None, "title": None}}, - ) - response = client.import_storage.list_types() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.import_storage.list_types() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_jwt_settings.py b/tests/test_jwt_settings.py deleted file mode 100644 index e17aa3d14..000000000 --- a/tests/test_jwt_settings.py +++ /dev/null @@ -1,44 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "api_tokens_enabled": True, - "legacy_api_tokens_enabled": True, - "api_token_ttl_days": 1, - } - expected_types: typing.Any = { - "api_tokens_enabled": None, - "legacy_api_tokens_enabled": None, - "api_token_ttl_days": "integer", - } - response = client.jwt_settings.get() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.jwt_settings.get() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "api_tokens_enabled": True, - "legacy_api_tokens_enabled": True, - "api_token_ttl_days": 1, - } - expected_types: typing.Any = { - "api_tokens_enabled": None, - "legacy_api_tokens_enabled": None, - "api_token_ttl_days": "integer", - } - response = client.jwt_settings.create(api_tokens_enabled=True, legacy_api_tokens_enabled=True, api_token_ttl_days=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.jwt_settings.create( - api_tokens_enabled=True, legacy_api_tokens_enabled=True, api_token_ttl_days=1 - ) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_ml.py b/tests/test_ml.py deleted file mode 100644 index 0a5badc22..000000000 --- a/tests/test_ml.py +++ /dev/null @@ -1,229 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "state": "CO", - "readable_state": "readable_state", - "is_interactive": True, - "url": "url", - "error_message": "error_message", - "title": "title", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "basic_auth_pass_is_set": "basic_auth_pass_is_set", - "description": "description", - "extra_params": {"key": "value"}, - "model_version": "model_version", - "timeout": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "auto_update": True, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "state": None, - "readable_state": None, - "is_interactive": None, - "url": None, - "error_message": None, - "title": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "basic_auth_pass_is_set": None, - "description": None, - "extra_params": ("dict", {0: (None, None)}), - "model_version": None, - "timeout": None, - "created_at": "datetime", - "updated_at": "datetime", - "auto_update": None, - "project": "integer", - } - }, - ) - response = client.ml.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "url": "url", - "project": 1, - "is_interactive": True, - "title": "title", - "description": "description", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "extra_params": {"key": 
"value"}, - "timeout": 1, - } - expected_types: typing.Any = { - "url": None, - "project": "integer", - "is_interactive": None, - "title": None, - "description": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "extra_params": ("dict", {0: (None, None)}), - "timeout": "integer", - } - response = client.ml.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "state": "CO", - "readable_state": "readable_state", - "is_interactive": True, - "url": "url", - "error_message": "error_message", - "title": "title", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "basic_auth_pass_is_set": "basic_auth_pass_is_set", - "description": "description", - "extra_params": {"key": "value"}, - "model_version": "model_version", - "timeout": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "auto_update": True, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "state": None, - "readable_state": None, - "is_interactive": None, - "url": None, - "error_message": None, - "title": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "basic_auth_pass_is_set": None, - "description": None, - "extra_params": ("dict", {0: (None, None)}), - "model_version": None, - "timeout": None, - "created_at": "datetime", - "updated_at": "datetime", - "auto_update": None, - "project": "integer", - } - response = client.ml.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "url": "url", - "project": 1, - "is_interactive": True, - "title": "title", - "description": "description", - "auth_method": "NONE", - "basic_auth_user": "basic_auth_user", - "basic_auth_pass": "basic_auth_pass", - "extra_params": {"key": "value"}, - "timeout": 1, - } - expected_types: typing.Any = { - "url": None, - "project": "integer", - "is_interactive": None, - "title": None, - "description": None, - "auth_method": None, - "basic_auth_user": None, - "basic_auth_pass": None, - "extra_params": ("dict", {0: (None, None)}), - "timeout": "integer", - } - response = client.ml.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.ml.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_predict_interactive(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.predict_interactive(id=1, task=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_train(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.train(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.train(id=1) # type: 
ignore[func-returns-value] - is None - ) - - -async def test_list_model_versions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.ml.list_model_versions(id="id") # type: ignore[func-returns-value] - is None - ) diff --git a/tests/test_model_providers.py b/tests/test_model_providers.py deleted file mode 100644 index 353ad2339..000000000 --- a/tests/test_model_providers.py +++ /dev/null @@ -1,194 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - "budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - }, - ) - response = client.model_providers.list() - 
validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - "budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - expected_types: typing.Any = { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - response = client.model_providers.create(provider="OpenAI") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.create(provider="OpenAI") - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - 
"budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - expected_types: typing.Any = { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - response = client.model_providers.get(pk=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.get(pk=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.model_providers.delete(pk=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.model_providers.delete(pk=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "provider": "OpenAI", - "api_key": "api_key", - "deployment_name": "deployment_name", - "endpoint": "endpoint", - "scope": "Organization", - "organization": 1, - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "is_internal": True, - "budget_limit": 1.1, - "budget_last_reset_date": "2024-01-15T09:30:00Z", - "budget_reset_period": "Monthly", - "budget_total_spent": 1.1, - "budget_alert_threshold": 1.1, - } - expected_types: typing.Any = { - "provider": None, - "api_key": None, - "deployment_name": None, - "endpoint": None, - "scope": None, - "organization": "integer", - "created_by": "integer", - "created_at": "datetime", - "updated_at": 
"datetime", - "is_internal": None, - "budget_limit": None, - "budget_last_reset_date": "datetime", - "budget_reset_period": None, - "budget_total_spent": None, - "budget_alert_threshold": None, - } - response = client.model_providers.update(pk=1, provider="OpenAI") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.model_providers.update(pk=1, provider="OpenAI") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_predictions.py b/tests/test_predictions.py deleted file mode 100644 index f8559d6b3..000000000 --- a/tests/test_predictions.py +++ /dev/null @@ -1,404 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": 
None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": "integer", - "task": "integer", - "project": "integer", - } - }, - ) - response = client.predictions.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": "integer", - "task": "integer", - "project": "integer", - } - response = client.predictions.create( - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": 
"rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.create( - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": 
"integer", - "task": "integer", - "project": "integer", - } - response = client.predictions.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.predictions.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.predictions.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "result": [ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - "model_version": "yolo-v8", - "created_ago": "created_ago", - "score": 0.95, - "cluster": 1, - "neighbors": {"key": "value"}, - "mislabeling": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "model": 1, - "model_run": 1, - "task": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "result": ( - "list", - { - 0: ( - "dict", - { - 0: (None, None), - 1: (None, None), - 2: (None, None), - 3: (None, None), - 4: (None, None), - 5: (None, None), - 6: (None, None), - }, - ) - }, - ), - "model_version": None, - "created_ago": None, - "score": None, - "cluster": "integer", - "neighbors": ("dict", {0: (None, None)}), - "mislabeling": None, - "created_at": "datetime", - "updated_at": "datetime", - "model": "integer", - "model_run": "integer", - "task": "integer", - "project": "integer", - } - 
response = client.predictions.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.predictions.update( - id=1, - result=[ - { - "original_width": 1920, - "original_height": 1080, - "image_rotation": 0, - "from_name": "bboxes", - "to_name": "image", - "type": "rectanglelabels", - "value": { - "x": 20, - "y": 30, - "width": 50, - "height": 60, - "rotation": 0, - "values": {"rectanglelabels": ["Person"]}, - }, - } - ], - score=0.95, - model_version="yolo-v8", - ) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_projects.py b/tests/test_projects.py deleted file mode 100644 index 61fb9793b..000000000 --- a/tests/test_projects.py +++ /dev/null @@ -1,296 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "My project", - "description": "My first project", - "label_config": "[...]", - "expert_instruction": "Label all cats", - "show_instruction": True, - "show_skip_button": True, - "enable_empty_annotation": True, - "show_annotation_history": True, - "reveal_preannotations_interactively": True, - "show_collab_predictions": True, - "maximum_annotations": 1, - "color": "color", - "control_weights": { - "my_bbox": { - "type": "RectangleLabels", - "labels": {"Car": 1, "Airplaine": 0.5}, - "overall": 0.33, - } - }, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "label_config": None, - "expert_instruction": None, - "show_instruction": None, - "show_skip_button": None, - "enable_empty_annotation": None, - "show_annotation_history": None, - "reveal_preannotations_interactively": None, - "show_collab_predictions": None, - "maximum_annotations": "integer", - "color": None, - "control_weights": ("dict", {0: (None, None)}), - } - response = client.projects.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "My project", - "description": "My first project", - "label_config": "[...]", - "expert_instruction": "Label all cats", - "show_instruction": True, - "show_skip_button": True, - "enable_empty_annotation": True, - "show_annotation_history": True, - "organization": 1, - "prompts": [ - { - "title": "title", - "description": "description", - 
"created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - ], - "color": "#FF0000", - "maximum_annotations": 1, - "annotation_limit_count": 10, - "annotation_limit_percent": 50, - "is_published": True, - "model_version": "1.0.0", - "is_draft": False, - "created_by": { - "id": 1, - "first_name": "Jo", - "last_name": "Doe", - "email": "manager@humansignal.com", - "avatar": "avatar", - }, - "created_at": "2023-08-24T14:15:22Z", - "min_annotations_to_start_training": 0, - "start_training_on_annotation_update": True, - "show_collab_predictions": True, - "num_tasks_with_annotations": 10, - "task_number": 100, - "useful_annotation_number": 10, - "ground_truth_number": 5, - "skipped_annotations_number": 0, - "total_annotations_number": 10, - "total_predictions_number": 0, - "sampling": "Sequential sampling", - "show_ground_truth_first": True, - "show_overlap_first": True, - "overlap_cohort_percentage": 100, - "task_data_login": "user", - "task_data_password": "secret", - "control_weights": {"key": "value"}, - "parsed_label_config": {"key": "value"}, - "evaluate_predictions_automatically": False, - "config_has_control_tags": True, - "skip_queue": "REQUEUE_FOR_ME", - "reveal_preannotations_interactively": True, - "pinned_at": "2023-08-24T14:15:22Z", - "finished_task_number": 10, - "queue_total": 10, - "queue_done": 100, - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "label_config": None, - "expert_instruction": None, - "show_instruction": None, - "show_skip_button": None, - "enable_empty_annotation": None, - "show_annotation_history": None, - "organization": "integer", - "prompts": ( - "list", - { - 0: { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - 
"organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - }, - ), - "color": None, - "maximum_annotations": "integer", - "annotation_limit_count": "integer", - "annotation_limit_percent": None, - "is_published": None, - "model_version": None, - "is_draft": None, - "created_by": { - "id": "integer", - "first_name": None, - "last_name": None, - "email": None, - "avatar": None, - }, - "created_at": "datetime", - "min_annotations_to_start_training": "integer", - "start_training_on_annotation_update": None, - "show_collab_predictions": None, - "num_tasks_with_annotations": "integer", - "task_number": "integer", - "useful_annotation_number": "integer", - "ground_truth_number": "integer", - "skipped_annotations_number": "integer", - "total_annotations_number": "integer", - "total_predictions_number": "integer", - "sampling": None, - "show_ground_truth_first": None, - "show_overlap_first": None, - "overlap_cohort_percentage": "integer", - "task_data_login": None, - "task_data_password": None, - "control_weights": ("dict", {0: (None, None)}), - "parsed_label_config": ("dict", {0: (None, None)}), - "evaluate_predictions_automatically": None, - "config_has_control_tags": None, - "skip_queue": None, - "reveal_preannotations_interactively": None, - "pinned_at": "datetime", - "finished_task_number": "integer", - "queue_total": "integer", - "queue_done": "integer", - } - response = client.projects.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.projects.delete(id=1) # type: ignore[func-returns-value] 
- is None - ) - - assert ( - await async_client.projects.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "My project", - "description": "My first project", - "label_config": "[...]", - "expert_instruction": "Label all cats", - "show_instruction": True, - "show_skip_button": True, - "enable_empty_annotation": True, - "show_annotation_history": True, - "reveal_preannotations_interactively": True, - "show_collab_predictions": True, - "maximum_annotations": 1, - "annotation_limit_count": 1, - "annotation_limit_percent": 1.1, - "color": "color", - "control_weights": { - "my_bbox": { - "type": "RectangleLabels", - "labels": {"Car": 1, "Airplaine": 0.5}, - "overall": 0.33, - } - }, - } - expected_types: typing.Any = { - "title": None, - "description": None, - "label_config": None, - "expert_instruction": None, - "show_instruction": None, - "show_skip_button": None, - "enable_empty_annotation": None, - "show_annotation_history": None, - "reveal_preannotations_interactively": None, - "show_collab_predictions": None, - "maximum_annotations": "integer", - "annotation_limit_count": "integer", - "annotation_limit_percent": None, - "color": None, - "control_weights": ("dict", {0: (None, None)}), - } - response = client.projects.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.update(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_import_tasks(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "task_count": 1, - "annotation_count": 1, - "predictions_count": 1, - "duration": 1.1, - "file_upload_ids": [1], - "could_be_tasks_list": True, - "found_formats": ["found_formats"], - "data_columns": ["data_columns"], - } - expected_types: typing.Any = { - "task_count": 
"integer", - "annotation_count": "integer", - "predictions_count": "integer", - "duration": None, - "file_upload_ids": ("list", {0: "integer"}), - "could_be_tasks_list": None, - "found_formats": ("list", {0: None}), - "data_columns": ("list", {0: None}), - } - response = client.projects.import_tasks(id=1, request=[{"key": "value"}]) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.import_tasks(id=1, request=[{"key": "value"}]) - validate_response(async_response, expected_response, expected_types) - - -async def test_validate_config(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"label_config": "label_config"} - expected_types: typing.Any = {"label_config": None} - response = client.projects.validate_config(id=1, label_config="label_config") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.projects.validate_config(id=1, label_config="label_config") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_prompts.py b/tests/test_prompts.py deleted file mode 100644 index 7b13a67ff..000000000 --- a/tests/test_prompts.py +++ /dev/null @@ -1,186 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - }, - ) - response = client.prompts.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - expected_types: typing.Any = { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - 
response = client.prompts.create(title="title", input_fields=["input_fields"], output_classes=["output_classes"]) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.create( - title="title", input_fields=["input_fields"], output_classes=["output_classes"] - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - expected_types: typing.Any = { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - response = client.prompts.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.prompts.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.prompts.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "title": "title", - "description": "description", - "created_by": 1, - "created_at": 
"2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "organization": 1, - "input_fields": ["input_fields"], - "output_classes": ["output_classes"], - "associated_projects": [1], - "skill_name": "skill_name", - } - expected_types: typing.Any = { - "title": None, - "description": None, - "created_by": "integer", - "created_at": "datetime", - "updated_at": "datetime", - "organization": "integer", - "input_fields": ("list", {0: None}), - "output_classes": ("list", {0: None}), - "associated_projects": ("list", {0: "integer"}), - "skill_name": None, - } - response = client.prompts.update( - id=1, - title="title", - input_fields=["input_fields"], - output_classes=["output_classes"], - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.update( - id=1, - title="title", - input_fields=["input_fields"], - output_classes=["output_classes"], - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_batch_predictions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"detail": "detail"} - expected_types: typing.Any = {"detail": None} - response = client.prompts.batch_predictions() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.batch_predictions() - validate_response(async_response, expected_response, expected_types) - - -async def test_batch_failed_predictions(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"detail": "detail"} - expected_types: typing.Any = {"detail": None} - response = client.prompts.batch_failed_predictions() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.prompts.batch_failed_predictions() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_tasks.py b/tests/test_tasks.py deleted file mode 
100644 index 3b16590c9..000000000 --- a/tests/test_tasks.py +++ /dev/null @@ -1,353 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_create_many_status(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "preannotated_from_fields": {"key": "value"}, - "commit_to_project": True, - "return_task_ids": True, - "status": "created", - "url": "url", - "traceback": "traceback", - "error": "error", - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "finished_at": "2024-01-15T09:30:00Z", - "task_count": 1, - "annotation_count": 1, - "prediction_count": 1, - "duration": 1, - "file_upload_ids": {"key": "value"}, - "could_be_tasks_list": True, - "found_formats": {"key": "value"}, - "data_columns": {"key": "value"}, - "tasks": {"key": "value"}, - "task_ids": {"key": "value"}, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "preannotated_from_fields": ("dict", {0: (None, None)}), - "commit_to_project": None, - "return_task_ids": None, - "status": None, - "url": None, - "traceback": None, - "error": None, - "created_at": "datetime", - "updated_at": "datetime", - "finished_at": "datetime", - "task_count": "integer", - "annotation_count": "integer", - "prediction_count": "integer", - "duration": "integer", - "file_upload_ids": ("dict", {0: (None, None)}), - "could_be_tasks_list": None, - "found_formats": ("dict", {0: (None, None)}), - "data_columns": ("dict", {0: (None, None)}), - "tasks": ("dict", {0: (None, None)}), - "task_ids": ("dict", {0: (None, None)}), - "project": "integer", - } - response = client.tasks.create_many_status(id=1, import_pk="import_pk") - validate_response(response, expected_response, expected_types) - - async_response = await 
async_client.tasks.create_many_status(id=1, import_pk="import_pk") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete_all_tasks(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.tasks.delete_all_tasks(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "data": {"image": "https://example.com/image.jpg", "text": "Hello, AI!"}, - "meta": {"key": "value"}, - "created_at": "2024-06-18T23:45:46Z", - "updated_at": "2024-06-18T23:45:46Z", - "is_labeled": False, - "overlap": 1, - "inner_id": 1, - "total_annotations": 0, - "cancelled_annotations": 0, - "total_predictions": 0, - "comment_count": 0, - "unresolved_comment_count": 0, - "last_comment_updated_at": "2024-01-15T09:30:00Z", - "project": 1, - "updated_by": [{"user_id": 1}], - "file_upload": "42d46c4c-my-pic.jpeg", - "comment_authors": [1], - } - expected_types: typing.Any = { - "id": "integer", - "data": ("dict", {0: (None, None), 1: (None, None)}), - "meta": ("dict", {0: (None, None)}), - "created_at": "datetime", - "updated_at": "datetime", - "is_labeled": None, - "overlap": "integer", - "inner_id": "integer", - "total_annotations": "integer", - "cancelled_annotations": "integer", - "total_predictions": "integer", - "comment_count": "integer", - "unresolved_comment_count": "integer", - "last_comment_updated_at": "datetime", - "project": "integer", - "updated_by": ("list", {0: ("dict", {0: (None, None)})}), - "file_upload": None, - "comment_authors": ("list", {0: "integer"}), - } - response = client.tasks.create( - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - 
project=1, - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tasks.create( - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - project=1, - ) - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 13, - "predictions": [ - { - "result": [{"key": "value"}], - "score": 1.1, - "model_version": "model_version", - "model": {"key": "value"}, - "model_run": {"key": "value"}, - "task": 1, - "project": 1.1, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - ], - "annotations": [ - { - "id": 1, - "result": [{"key": "value"}], - "created_username": "created_username", - "created_ago": "created_ago", - "completed_by": {"key": "value"}, - "unique_id": "unique_id", - "was_cancelled": True, - "ground_truth": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "draft_created_at": "2024-01-15T09:30:00Z", - "lead_time": 1.1, - "import_id": 1, - "last_action": "prediction", - "task": 1, - "project": 1, - "updated_by": 1, - "parent_prediction": 1, - "parent_annotation": 1, - "last_created_by": 1, - } - ], - "drafts": [ - { - "result": [{"key": "value"}], - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - } - ], - "annotators": [1], - "inner_id": 2, - "cancelled_annotations": 0, - "total_annotations": 0, - "total_predictions": 0, - "annotations_results": "", - "predictions_results": "", - "file_upload": "6b25fc23-some_3.mp4", - "annotations_ids": "", - "predictions_model_versions": "", - "draft_exists": False, - "updated_by": [{"key": "value"}], - "data": {"image": "/data/upload/1/6b25fc23-some_3.mp4"}, - "meta": {"key": "value"}, - "created_at": "2024-06-18T23:45:46Z", - "updated_at": "2024-06-18T23:45:46Z", - "is_labeled": False, - "overlap": 1, - 
"comment_count": 0, - "unresolved_comment_count": 0, - "project": 1, - "comment_authors": [1], - } - expected_types: typing.Any = { - "id": "integer", - "predictions": ( - "list", - { - 0: { - "result": ("list", {0: ("dict", {0: (None, None)})}), - "score": None, - "model_version": None, - "model": ("dict", {0: (None, None)}), - "model_run": ("dict", {0: (None, None)}), - "task": "integer", - "project": None, - "created_at": "datetime", - "updated_at": "datetime", - } - }, - ), - "annotations": ( - "list", - { - 0: { - "id": "integer", - "result": ("list", {0: ("dict", {0: (None, None)})}), - "created_username": None, - "created_ago": None, - "completed_by": ("dict", {0: (None, None)}), - "unique_id": None, - "was_cancelled": None, - "ground_truth": None, - "created_at": "datetime", - "updated_at": "datetime", - "draft_created_at": "datetime", - "lead_time": None, - "import_id": "integer", - "last_action": None, - "task": "integer", - "project": "integer", - "updated_by": "integer", - "parent_prediction": "integer", - "parent_annotation": "integer", - "last_created_by": "integer", - } - }, - ), - "drafts": ( - "list", - { - 0: { - "result": ("list", {0: ("dict", {0: (None, None)})}), - "created_at": "datetime", - "updated_at": "datetime", - } - }, - ), - "annotators": ("list", {0: "integer"}), - "inner_id": "integer", - "cancelled_annotations": "integer", - "total_annotations": "integer", - "total_predictions": "integer", - "annotations_results": None, - "predictions_results": None, - "file_upload": None, - "annotations_ids": None, - "predictions_model_versions": None, - "draft_exists": None, - "updated_by": ("list", {0: ("dict", {0: (None, None)})}), - "data": ("dict", {0: (None, None)}), - "meta": ("dict", {0: (None, None)}), - "created_at": "datetime", - "updated_at": "datetime", - "is_labeled": None, - "overlap": "integer", - "comment_count": "integer", - "unresolved_comment_count": "integer", - "project": "integer", - "comment_authors": ("list", {0: 
"integer"}), - } - response = client.tasks.get(id="id") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tasks.get(id="id") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.tasks.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.tasks.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "data": {"image": "https://example.com/image.jpg", "text": "Hello, AI!"}, - "meta": {"key": "value"}, - "created_at": "2024-06-18T23:45:46Z", - "updated_at": "2024-06-18T23:45:46Z", - "is_labeled": False, - "overlap": 1, - "inner_id": 1, - "total_annotations": 0, - "cancelled_annotations": 0, - "total_predictions": 0, - "comment_count": 0, - "unresolved_comment_count": 0, - "last_comment_updated_at": "2024-01-15T09:30:00Z", - "project": 1, - "updated_by": [{"user_id": 1}], - "file_upload": "42d46c4c-my-pic.jpeg", - "comment_authors": [1], - } - expected_types: typing.Any = { - "id": "integer", - "data": ("dict", {0: (None, None), 1: (None, None)}), - "meta": ("dict", {0: (None, None)}), - "created_at": "datetime", - "updated_at": "datetime", - "is_labeled": None, - "overlap": "integer", - "inner_id": "integer", - "total_annotations": "integer", - "cancelled_annotations": "integer", - "total_predictions": "integer", - "comment_count": "integer", - "unresolved_comment_count": "integer", - "last_comment_updated_at": "datetime", - "project": "integer", - "updated_by": ("list", {0: ("dict", {0: (None, None)})}), - "file_upload": None, - "comment_authors": ("list", {0: "integer"}), - } - response = 
client.tasks.update( - id="id", - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - project=1, - ) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tasks.update( - id="id", - data={"image": "https://example.com/image.jpg", "text": "Hello, world!"}, - project=1, - ) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_tokens.py b/tests/test_tokens.py deleted file mode 100644 index 706651d59..000000000 --- a/tests/test_tokens.py +++ /dev/null @@ -1,66 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_blacklist(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.tokens.blacklist(refresh="refresh") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.tokens.blacklist(refresh="refresh") # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"token": "token", "created_at": "created_at", "expires_at": "expires_at"}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - {0: {"token": None, "created_at": None, "expires_at": None}}, - ) - response = client.tokens.get() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.get() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "token": "token", - "created_at": "created_at", - "expires_at": "expires_at", - } - expected_types: 
typing.Any = {"token": None, "created_at": None, "expires_at": None} - response = client.tokens.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_refresh(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"access": "access"} - expected_types: typing.Any = {"access": None} - response = client.tokens.refresh(refresh="refresh") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.refresh(refresh="refresh") - validate_response(async_response, expected_response, expected_types) - - -async def test_rotate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"refresh": "refresh"} - expected_types: typing.Any = {"refresh": None} - response = client.tokens.rotate(refresh="refresh") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.tokens.rotate(refresh="refresh") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_users.py b/tests/test_users.py deleted file mode 100644 index e6e5582b0..000000000 --- a/tests/test_users.py +++ /dev/null @@ -1,226 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_reset_token(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"token": "token"} - expected_types: typing.Any = {"token": None} - response = client.users.reset_token() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.reset_token() - validate_response(async_response, expected_response, expected_types) - - -async def test_get_token(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"detail": "detail"} - expected_types: typing.Any = {"detail": None} - response = client.users.get_token() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.get_token() - validate_response(async_response, expected_response, expected_types) - - -async def test_whoami(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.whoami() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.whoami() - validate_response(async_response, expected_response, expected_types) 
- - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - }, - ) - response = client.users.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.create() - 
validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.users.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.users.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "first_name": "first_name", - "last_name": "last_name", - "username": "username", - "email": "email", - "last_activity": "2024-01-15T09:30:00Z", - "avatar": "avatar", - "initials": "initials", - "phone": "phone", - "active_organization": 1, - "allow_newsletters": True, - "date_joined": "2024-01-15T09:30:00Z", - } - expected_types: typing.Any = { - "id": "integer", - "first_name": None, - "last_name": None, - "username": 
None, - "email": None, - "last_activity": "datetime", - "avatar": None, - "initials": None, - "phone": None, - "active_organization": "integer", - "allow_newsletters": None, - "date_joined": "datetime", - } - response = client.users.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.users.update(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_versions.py b/tests/test_versions.py deleted file mode 100644 index 16e3f4d37..000000000 --- a/tests/test_versions.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "release": "release", - "label-studio-os-package": {"key": "value"}, - "label-studio-os-backend": {"key": "value"}, - "label-studio-frontend": {"key": "value"}, - "dm2": {"key": "value"}, - "label-studio-converter": {"key": "value"}, - "edition": "Community", - "lsf": {"key": "value"}, - "backend": {"key": "value"}, - } - expected_types: typing.Any = { - "release": None, - "label-studio-os-package": ("dict", {0: (None, None)}), - "label-studio-os-backend": ("dict", {0: (None, None)}), - "label-studio-frontend": ("dict", {0: (None, None)}), - "dm2": ("dict", {0: (None, None)}), - "label-studio-converter": ("dict", {0: (None, None)}), - "edition": None, - "lsf": ("dict", {0: (None, None)}), - "backend": ("dict", {0: (None, None)}), - } - response = client.versions.get() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.versions.get() - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_views.py b/tests/test_views.py deleted file mode 
100644 index b6dc97106..000000000 --- a/tests/test_views.py +++ /dev/null @@ -1,178 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ( - "list", - {0: {"column": None, "type": None, "operator": None}}, - ), - "conjunction": None, - }, - "data": ("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - }, - ) - response = client.views.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), - "conjunction": None, - }, - 
"data": ("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - response = client.views.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_delete_all(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.views.delete_all(project=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.views.delete_all(project=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), - "conjunction": None, - }, - "data": ("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - response = client.views.get(id="id") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.get(id="id") - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a 
value - assert ( - client.views.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.views.delete(id="id") # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "filter_group": { - "id": 1, - "filters": [{"column": "column", "type": "type", "operator": "operator"}], - "conjunction": "conjunction", - }, - "data": {"key": "value"}, - "ordering": {"key": "value"}, - "selected_items": {"key": "value"}, - "user": 1, - "project": 1, - } - expected_types: typing.Any = { - "id": "integer", - "filter_group": { - "id": "integer", - "filters": ("list", {0: {"column": None, "type": None, "operator": None}}), - "conjunction": None, - }, - "data": ("dict", {0: (None, None)}), - "ordering": ("dict", {0: (None, None)}), - "selected_items": ("dict", {0: (None, None)}), - "user": "integer", - "project": "integer", - } - response = client.views.update(id="id") - validate_response(response, expected_response, expected_types) - - async_response = await async_client.views.update(id="id") - validate_response(async_response, expected_response, expected_types) diff --git a/tests/test_workspaces.py b/tests/test_workspaces.py deleted file mode 100644 index dc43be593..000000000 --- a/tests/test_workspaces.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from .utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [ - { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - ] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - { - 0: { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - }, - ) - response = client.workspaces.list() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.list() - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - response = client.workspaces.create() - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.create() - validate_response(async_response, expected_response, expected_types) - - -async def test_get(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - response = client.workspaces.get(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.get(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.workspaces.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.workspaces.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - -async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = { - "id": 1, - "title": "title", - "description": "description", - "is_public": True, - "is_personal": True, - "is_archived": True, - "created_at": "2024-01-15T09:30:00Z", - "updated_at": "2024-01-15T09:30:00Z", - "created_by": 1, - "color": "color", - } - expected_types: typing.Any = { - "id": "integer", - "title": None, - "description": None, - "is_public": None, - "is_personal": None, - "is_archived": None, - "created_at": "datetime", - "updated_at": "datetime", - "created_by": "integer", - "color": None, - } - response = client.workspaces.update(id=1) - validate_response(response, expected_response, expected_types) - - async_response = 
await async_client.workspaces.update(id=1) - validate_response(async_response, expected_response, expected_types) diff --git a/tests/utilities.py b/tests/utilities.py deleted file mode 100644 index 3d228806a..000000000 --- a/tests/utilities.py +++ /dev/null @@ -1,162 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -import uuid - -from dateutil import parser - -import pydantic - - -def cast_field(json_expectation: typing.Any, type_expectation: typing.Any) -> typing.Any: - # Cast these specific types which come through as string and expect our - # models to cast to the correct type. - if type_expectation == "uuid": - return uuid.UUID(json_expectation) - elif type_expectation == "date": - return parser.parse(json_expectation).date() - elif type_expectation == "datetime": - return parser.parse(json_expectation) - elif type_expectation == "set": - return set(json_expectation) - elif type_expectation == "integer": - # Necessary as we allow numeric keys, but JSON makes them strings - return int(json_expectation) - - return json_expectation - - -def validate_field(response: typing.Any, json_expectation: typing.Any, type_expectation: typing.Any) -> None: - # Allow for an escape hatch if the object cannot be validated - if type_expectation == "no_validate": - return - - is_container_of_complex_type = False - # Parse types in containers, note that dicts are handled within `validate_response` - if isinstance(json_expectation, list): - if isinstance(type_expectation, tuple): - container_expectation = type_expectation[0] - contents_expectation = type_expectation[1] - - cast_json_expectation = [] - for idx, ex in enumerate(json_expectation): - if isinstance(contents_expectation, dict): - entry_expectation = contents_expectation.get(idx) - if isinstance(entry_expectation, dict): - is_container_of_complex_type = True - validate_response( - response=response[idx], - json_expectation=ex, - type_expectations=entry_expectation, - ) - else: - 
cast_json_expectation.append(cast_field(ex, entry_expectation)) - else: - cast_json_expectation.append(ex) - json_expectation = cast_json_expectation - - # Note that we explicitly do not allow for sets of pydantic models as they are not hashable, so - # if any of the values of the set have a type_expectation of a dict, we're assuming it's a pydantic - # model and keeping it a list. - if container_expectation != "set" or not any( - map( - lambda value: isinstance(value, dict), - list(contents_expectation.values()), - ) - ): - json_expectation = cast_field(json_expectation, container_expectation) - elif isinstance(type_expectation, tuple): - container_expectation = type_expectation[0] - contents_expectation = type_expectation[1] - if isinstance(contents_expectation, dict): - json_expectation = { - cast_field( - key, - contents_expectation.get(idx)[0] # type: ignore - if contents_expectation.get(idx) is not None - else None, - ): cast_field( - value, - contents_expectation.get(idx)[1] # type: ignore - if contents_expectation.get(idx) is not None - else None, - ) - for idx, (key, value) in enumerate(json_expectation.items()) - } - else: - json_expectation = cast_field(json_expectation, container_expectation) - elif type_expectation is not None: - json_expectation = cast_field(json_expectation, type_expectation) - - # When dealing with containers of models, etc. 
we're validating them implicitly, so no need to check the resultant list - if not is_container_of_complex_type: - assert ( - json_expectation == response - ), "Primitives found, expected: {0} (type: {1}), Actual: {2} (type: {3})".format( - json_expectation, type(json_expectation), response, type(response) - ) - - -# Arg type_expectations is a deeply nested structure that matches the response, but with the values replaced with the expected types -def validate_response(response: typing.Any, json_expectation: typing.Any, type_expectations: typing.Any) -> None: - # Allow for an escape hatch if the object cannot be validated - if type_expectations == "no_validate": - return - - if ( - not isinstance(response, list) - and not isinstance(response, dict) - and not issubclass(type(response), pydantic.BaseModel) - ): - validate_field( - response=response, - json_expectation=json_expectation, - type_expectation=type_expectations, - ) - return - - if isinstance(response, list): - assert len(response) == len(json_expectation), "Length mismatch, expected: {0}, Actual: {1}".format( - len(response), len(json_expectation) - ) - content_expectation = type_expectations - if isinstance(type_expectations, tuple): - content_expectation = type_expectations[1] - for idx, item in enumerate(response): - validate_response( - response=item, - json_expectation=json_expectation[idx], - type_expectations=content_expectation[idx], - ) - else: - response_json = response - if issubclass(type(response), pydantic.BaseModel): - response_json = response.dict(by_alias=True) - - for key, value in json_expectation.items(): - assert key in response_json, "Field {0} not found within the response object: {1}".format( - key, response_json - ) - - type_expectation = None - if type_expectations is not None and isinstance(type_expectations, dict): - type_expectation = type_expectations.get(key) - - # If your type_expectation is a tuple then you have a container field, process it as such - # Otherwise, we're just 
validating a single field that's a pydantic model. - if isinstance(value, dict) and not isinstance(type_expectation, tuple): - validate_response( - response=response_json[key], - json_expectation=value, - type_expectations=type_expectation, - ) - else: - validate_field( - response=response_json[key], - json_expectation=value, - type_expectation=type_expectation, - ) - - # Ensure there are no additional fields here either - del response_json[key] - assert len(response_json) == 0, "Additional fields found, expected None: {0}".format(response_json) diff --git a/tests/utils/assets/models/__init__.py b/tests/utils/assets/models/__init__.py index 3a1c852e7..2cf012635 100644 --- a/tests/utils/assets/models/__init__.py +++ b/tests/utils/assets/models/__init__.py @@ -5,7 +5,7 @@ from .circle import CircleParams from .object_with_defaults import ObjectWithDefaultsParams from .object_with_optional_field import ObjectWithOptionalFieldParams -from .shape import ShapeParams, Shape_CircleParams, Shape_SquareParams +from .shape import Shape_CircleParams, Shape_SquareParams, ShapeParams from .square import SquareParams from .undiscriminated_shape import UndiscriminatedShapeParams diff --git a/tests/utils/assets/models/circle.py b/tests/utils/assets/models/circle.py index 09b8e0647..c54c57b40 100644 --- a/tests/utils/assets/models/circle.py +++ b/tests/utils/assets/models/circle.py @@ -3,7 +3,7 @@ # This file was auto-generated by Fern from our API Definition. import typing_extensions -import typing_extensions + from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/object_with_defaults.py b/tests/utils/assets/models/object_with_defaults.py index ef14f7b2c..a977b1d2a 100644 --- a/tests/utils/assets/models/object_with_defaults.py +++ b/tests/utils/assets/models/object_with_defaults.py @@ -3,7 +3,6 @@ # This file was auto-generated by Fern from our API Definition. 
import typing_extensions -import typing_extensions class ObjectWithDefaultsParams(typing_extensions.TypedDict): diff --git a/tests/utils/assets/models/object_with_optional_field.py b/tests/utils/assets/models/object_with_optional_field.py index ee4f9dbfe..802e01ff2 100644 --- a/tests/utils/assets/models/object_with_optional_field.py +++ b/tests/utils/assets/models/object_with_optional_field.py @@ -2,16 +2,17 @@ # This file was auto-generated by Fern from our API Definition. -import typing_extensions -import typing -import typing_extensions -from label_studio_sdk.core.serialization import FieldMetadata import datetime as dt +import typing import uuid + +import typing_extensions from .color import Color from .shape import ShapeParams from .undiscriminated_shape import UndiscriminatedShapeParams +from label_studio_sdk.core.serialization import FieldMetadata + class ObjectWithOptionalFieldParams(typing_extensions.TypedDict): literal: typing.Literal["lit_one"] diff --git a/tests/utils/assets/models/shape.py b/tests/utils/assets/models/shape.py index 820dec7a6..e9d51e32c 100644 --- a/tests/utils/assets/models/shape.py +++ b/tests/utils/assets/models/shape.py @@ -3,9 +3,11 @@ # This file was auto-generated by Fern from our API Definition. from __future__ import annotations -import typing_extensions -import typing_extensions + import typing + +import typing_extensions + from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/square.py b/tests/utils/assets/models/square.py index b70897be3..d9d65afca 100644 --- a/tests/utils/assets/models/square.py +++ b/tests/utils/assets/models/square.py @@ -3,7 +3,7 @@ # This file was auto-generated by Fern from our API Definition. 
import typing_extensions -import typing_extensions + from label_studio_sdk.core.serialization import FieldMetadata diff --git a/tests/utils/assets/models/undiscriminated_shape.py b/tests/utils/assets/models/undiscriminated_shape.py index 68876a23c..99f12b300 100644 --- a/tests/utils/assets/models/undiscriminated_shape.py +++ b/tests/utils/assets/models/undiscriminated_shape.py @@ -3,6 +3,7 @@ # This file was auto-generated by Fern from our API Definition. import typing + from .circle import CircleParams from .square import SquareParams diff --git a/tests/utils/test_http_client.py b/tests/utils/test_http_client.py index ba375d043..1a2d02245 100644 --- a/tests/utils/test_http_client.py +++ b/tests/utils/test_http_client.py @@ -14,10 +14,7 @@ def test_get_json_request_body() -> None: assert data_body is None json_body_extras, data_body_extras = get_request_body( - json={"goodbye": "world"}, - data=None, - request_options=get_request_options(), - omit=None, + json={"goodbye": "world"}, data=None, request_options=get_request_options(), omit=None ) assert json_body_extras == {"goodbye": "world", "see you": "later"} @@ -30,10 +27,7 @@ def test_get_files_request_body() -> None: assert json_body is None json_body_extras, data_body_extras = get_request_body( - json=None, - data={"goodbye": "world"}, - request_options=get_request_options(), - omit=None, + json=None, data={"goodbye": "world"}, request_options=get_request_options(), omit=None ) assert data_body_extras == {"goodbye": "world", "see you": "later"} diff --git a/tests/utils/test_query_encoding.py b/tests/utils/test_query_encoding.py index c995dff43..6276c4f4f 100644 --- a/tests/utils/test_query_encoding.py +++ b/tests/utils/test_query_encoding.py @@ -15,26 +15,14 @@ def test_query_encoding_deep_objects() -> None: def test_query_encoding_deep_object_arrays() -> None: - assert encode_query( - { - "objects": [ - {"key": "hello", "value": "world"}, - {"key": "foo", "value": "bar"}, - ] - } - ) == [ + assert 
encode_query({"objects": [{"key": "hello", "value": "world"}, {"key": "foo", "value": "bar"}]}) == [ ("objects[key]", "hello"), ("objects[value]", "world"), ("objects[key]", "foo"), ("objects[value]", "bar"), ] assert encode_query( - { - "users": [ - {"name": "string", "tags": ["string"]}, - {"name": "string2", "tags": ["string2", "string3"]}, - ] - } + {"users": [{"name": "string", "tags": ["string"]}, {"name": "string2", "tags": ["string2", "string3"]}]} ) == [ ("users[name]", "string"), ("users[tags]", "string"), @@ -46,4 +34,4 @@ def test_query_encoding_deep_object_arrays() -> None: def test_encode_query_with_none() -> None: encoded = encode_query(None) - assert encoded == None + assert encoded is None diff --git a/tests/utils/test_serialization.py b/tests/utils/test_serialization.py index 1f7b0daf8..2654074e2 100644 --- a/tests/utils/test_serialization.py +++ b/tests/utils/test_serialization.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -from typing import List, Any +from typing import Any, List -from label_studio_sdk.core.serialization import convert_and_respect_annotation_metadata -from .assets.models import ShapeParams, ObjectWithOptionalFieldParams +from .assets.models import ObjectWithOptionalFieldParams, ShapeParams +from label_studio_sdk.core.serialization import convert_and_respect_annotation_metadata UNION_TEST: ShapeParams = {"radius_measurement": 1.0, "shape_type": "circle", "id": "1"} UNION_TEST_CONVERTED = {"shapeType": "circle", "radiusMeasurement": 1.0, "id": "1"} @@ -21,51 +21,21 @@ def test_convert_and_respect_annotation_metadata() -> None: converted = convert_and_respect_annotation_metadata( object_=data, annotation=ObjectWithOptionalFieldParams, direction="write" ) - assert converted == { - "string": "string", - "long": 12345, - "bool": True, - "literal": "lit_one", - "any": "any", - } + assert converted == {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"} def 
test_convert_and_respect_annotation_metadata_in_list() -> None: data: List[ObjectWithOptionalFieldParams] = [ - { - "string": "string", - "long_": 12345, - "bool_": True, - "literal": "lit_one", - "any": "any", - }, - { - "string": "another string", - "long_": 67890, - "list_": [], - "literal": "lit_one", - "any": "any", - }, + {"string": "string", "long_": 12345, "bool_": True, "literal": "lit_one", "any": "any"}, + {"string": "another string", "long_": 67890, "list_": [], "literal": "lit_one", "any": "any"}, ] converted = convert_and_respect_annotation_metadata( object_=data, annotation=List[ObjectWithOptionalFieldParams], direction="write" ) assert converted == [ - { - "string": "string", - "long": 12345, - "bool": True, - "literal": "lit_one", - "any": "any", - }, - { - "string": "another string", - "long": 67890, - "list": [], - "literal": "lit_one", - "any": "any", - }, + {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"}, + {"string": "another string", "long": 67890, "list": [], "literal": "lit_one", "any": "any"}, ] diff --git a/tests/workspaces/__init__.py b/tests/workspaces/__init__.py deleted file mode 100644 index f3ea2659b..000000000 --- a/tests/workspaces/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/tests/workspaces/test_members.py b/tests/workspaces/test_members.py deleted file mode 100644 index be7fcd998..000000000 --- a/tests/workspaces/test_members.py +++ /dev/null @@ -1,42 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -from label_studio_sdk import LabelStudio -from label_studio_sdk import AsyncLabelStudio -import typing -from ..utilities import validate_response - - -async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = [{"user": {"key": "value"}}] - expected_types: typing.Tuple[typing.Any, typing.Any] = ( - "list", - {0: {"user": ("dict", {0: (None, None)})}}, - ) - response = client.workspaces.members.list(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.members.list(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - expected_response: typing.Any = {"user": 1} - expected_types: typing.Any = {"user": "integer"} - response = client.workspaces.members.create(id=1) - validate_response(response, expected_response, expected_types) - - async_response = await async_client.workspaces.members.create(id=1) - validate_response(async_response, expected_response, expected_types) - - -async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: - # Type ignore to avoid mypy complaining about the function not being meant to return a value - assert ( - client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] - is None - ) - - assert ( - await async_client.workspaces.members.delete(id=1) # type: ignore[func-returns-value] - is None - )